diff --git a/.coveragerc b/.coveragerc
new file mode 100644
index 00000000..19528760
--- /dev/null
+++ b/.coveragerc
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by synthtool. DO NOT EDIT!
+[run]
+branch = True
+omit =
+ google/__init__.py
+ google/cloud/__init__.py
+
+[report]
+fail_under = 100
+show_missing = True
+exclude_lines =
+ # Re-enable the standard pragma
+ pragma: NO COVER
+ # Ignore debug-only repr
+ def __repr__
+ # Ignore abstract methods
+ raise NotImplementedError
+omit =
+ */gapic/*.py
+ */proto/*.py
+ */core/*.py
+ */site-packages/*.py
+ google/cloud/__init__.py
diff --git a/.flake8 b/.flake8
new file mode 100644
index 00000000..32986c79
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by synthtool. DO NOT EDIT!
+[flake8]
+ignore = E203, E231, E266, E501, W503
+exclude =
+ # Exclude generated code.
+ **/proto/**
+ **/gapic/**
+ **/services/**
+ **/types/**
+ *_pb2.py
+
+ # Standard linting exemptions.
+ **/.nox/**
+ __pycache__,
+ .git,
+ *.pyc,
+ conf.py
diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
new file mode 100644
index 00000000..6d064ddb
--- /dev/null
+++ b/.github/.OwlBot.lock.yaml
@@ -0,0 +1,17 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+docker:
+ image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
+ digest: sha256:94bb690db96e6242b2567a4860a94d48fa48696d092e51b0884a1a2c0a79a407
+# created: 2024-07-31T14:52:44.926548819Z
diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml
new file mode 100644
index 00000000..52afa41a
--- /dev/null
+++ b/.github/.OwlBot.yaml
@@ -0,0 +1,19 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+docker:
+ image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
+
+
+begin-after-commit-hash: 16620c2159ec0ec0d9b28db56e61d9ad09355a2a
\ No newline at end of file
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 30c3973a..e446644f 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -3,9 +3,10 @@
#
# For syntax help see:
# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax
+# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json.
-# The @googleapis/yoshi-python is the default owner for changes in this repo
-* @googleapis/yoshi-python
+# @googleapis/yoshi-python is the default owner for changes in this repo
+* @googleapis/yoshi-python
-# The python-samples-reviewers team is the default owner for samples changes
-/samples/ @googleapis/python-samples-owners
\ No newline at end of file
+# @googleapis/python-samples-reviewers is the default owner for samples changes
+/samples/ @googleapis/python-samples-reviewers
diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
new file mode 100644
index 00000000..939e5341
--- /dev/null
+++ b/.github/CONTRIBUTING.md
@@ -0,0 +1,28 @@
+# How to Contribute
+
+We'd love to accept your patches and contributions to this project. There are
+just a few small guidelines you need to follow.
+
+## Contributor License Agreement
+
+Contributions to this project must be accompanied by a Contributor License
+Agreement. You (or your employer) retain the copyright to your contribution;
+this simply gives us permission to use and redistribute your contributions as
+part of the project. Head over to <https://cla.developers.google.com/> to see
+your current agreements on file or to sign a new one.
+
+You generally only need to submit a CLA once, so if you've already submitted one
+(even if it was for a different project), you probably don't need to do it
+again.
+
+## Code reviews
+
+All submissions, including submissions by project members, require review. We
+use GitHub pull requests for this purpose. Consult
+[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
+information on using pull requests.
+
+## Community Guidelines
+
+This project follows [Google's Open Source Community
+Guidelines](https://opensource.google.com/conduct/).
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 00000000..fe665c63
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,43 @@
+---
+name: Bug report
+about: Create a report to help us improve
+
+---
+
+Thanks for stopping by to let us know something could be better!
+
+**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response.
+
+Please run down the following list and make sure you've tried the usual "quick fixes":
+
+ - Search the issues already opened: https://github.com/googleapis/python-crc32c/issues
+ - Search StackOverflow: https://stackoverflow.com/questions/tagged/google-cloud-platform+python
+
+If you are still having issues, please be sure to include as much information as possible:
+
+#### Environment details
+
+ - OS type and version:
+ - Python version: `python --version`
+ - pip version: `pip --version`
+ - `google-crc32c` version: `pip show google-crc32c`
+
+#### Steps to reproduce
+
+ 1. ?
+ 2. ?
+
+#### Code example
+
+```python
+# example
+```
+
+#### Stack trace
+```
+# example
+```
+
+Making sure to follow these steps will guarantee the quickest resolution possible.
+
+Thanks!
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 00000000..6365857f
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,18 @@
+---
+name: Feature request
+about: Suggest an idea for this library
+
+---
+
+Thanks for stopping by to let us know something could be better!
+
+**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response.
+
+ **Is your feature request related to a problem? Please describe.**
+A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+ **Describe the solution you'd like**
+A clear and concise description of what you want to happen.
+ **Describe alternatives you've considered**
+A clear and concise description of any alternative solutions or features you've considered.
+ **Additional context**
+Add any other context or screenshots about the feature request here.
diff --git a/.github/ISSUE_TEMPLATE/support_request.md b/.github/ISSUE_TEMPLATE/support_request.md
new file mode 100644
index 00000000..99586903
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/support_request.md
@@ -0,0 +1,7 @@
+---
+name: Support request
+about: If you have a support contract with Google, please create an issue in the Google Cloud Support console.
+
+---
+
+**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response.
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 00000000..1c5bfdf2
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,7 @@
+Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly:
+- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-crc32c/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea
+- [ ] Ensure the tests and linter pass
+- [ ] Code coverage does not decrease (if any source code was changed)
+- [ ] Appropriate docs were updated (if necessary)
+
+Fixes #<issue_number_goes_here> 🦕
diff --git a/.github/auto-approve.yml b/.github/auto-approve.yml
new file mode 100644
index 00000000..311ebbb8
--- /dev/null
+++ b/.github/auto-approve.yml
@@ -0,0 +1,3 @@
+# https://github.com/googleapis/repo-automation-bots/tree/main/packages/auto-approve
+processes:
+ - "OwlBotTemplateChanges"
diff --git a/.github/auto-label.yaml b/.github/auto-label.yaml
new file mode 100644
index 00000000..21786a4e
--- /dev/null
+++ b/.github/auto-label.yaml
@@ -0,0 +1,20 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+requestsize:
+ enabled: true
+
+path:
+ pullrequest: true
+ paths:
+ samples: "samples"
diff --git a/.github/blunderbuss.yml b/.github/blunderbuss.yml
new file mode 100644
index 00000000..990b3114
--- /dev/null
+++ b/.github/blunderbuss.yml
@@ -0,0 +1,17 @@
+# Blunderbuss config
+#
+# This file controls who is assigned for pull requests and issues.
+# Note: This file is autogenerated. To make changes to the assignee
+# team, please update `codeowner_team` in `.repo-metadata.json`.
+
+assign_issues:
+ - googleapis/python-core-client-libraries
+
+assign_issues_by:
+ - labels:
+ - "samples"
+ to:
+ - googleapis/python-samples-reviewers
+
+assign_prs:
+ - googleapis/python-core-client-libraries
diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml
new file mode 100644
index 00000000..6fe78aa7
--- /dev/null
+++ b/.github/header-checker-lint.yml
@@ -0,0 +1,15 @@
+{"allowedCopyrightHolders": ["Google LLC"],
+ "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"],
+ "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"],
+ "sourceFileExtensions": [
+ "ts",
+ "js",
+ "java",
+ "sh",
+ "Dockerfile",
+ "yaml",
+ "py",
+ "html",
+ "txt"
+ ]
+}
\ No newline at end of file
diff --git a/.github/snippet-bot.yml b/.github/snippet-bot.yml
new file mode 100644
index 00000000..e69de29b
diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml
index 99092643..b8e8a906 100644
--- a/.github/sync-repo-settings.yaml
+++ b/.github/sync-repo-settings.yaml
@@ -28,9 +28,10 @@ branchProtectionRules:
requiresStrictStatusChecks: true
# List of required status check contexts that must pass for commits to be accepted to matching branches.
requiredStatusCheckContexts:
- - 'ubuntu-20.04'
- - 'macos-10.15'
- - 'windows-2019 ( x64 )'
+ - 'Kokoro - Linux'
+ - 'Kokoro - OS X'
+ - 'Kokoro - OS X Apple Silicon'
+ - 'Kokoro - Windows'
- 'cla/google'
# List of explicit permissions to add (additive only)
permissionRules:
diff --git a/.github/workflows/presubmit.yml b/.github/workflows/presubmit.yml
index 52c44c76..66626f0d 100644
--- a/.github/workflows/presubmit.yml
+++ b/.github/workflows/presubmit.yml
@@ -5,14 +5,16 @@ on:
branches:
- main
-jobs:
+permissions:
+ contents: read
+jobs:
mypy:
- runs-on: ubuntu-20.04
+ runs-on: ubuntu-22.04
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Set up Python
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: "3.9"
- name: Install nox
@@ -25,195 +27,3 @@ jobs:
SETUPTOOLS_ENABLE_FEATURES: legacy-editable
run: |
nox -s mypy
-
- build-wheels-linux:
- name: ${{ matrix.os }}
- strategy:
- matrix:
- os:
- - ubuntu-20.04
- runs-on: ${{ matrix.os }}
-
- steps:
- - uses: actions/checkout@v3
- with:
- submodules: 'recursive'
-
- - name: Build Wheels
- uses: pypa/cibuildwheel@v2.9.0
- env:
- # For presubmit, just build / test the most common arch
- CIBW_ARCHS: native
- # For presubmit, skip build / test for pypy
- CIBW_SKIP: pp37*
- CIBW_ENVIRONMENT: >
- CRC32C_PURE_PYTHON="0"
- CRC32C_INSTALL_PREFIX="$(pwd)/usr"
- CIBW_BUILD_VERBOSITY: 1
- # Build the C library inside CIBW so that the manylinux image is
- # used to link the share library; otherwise, our extension wouldn't
- # be able to link with it.
- CIBW_BEFORE_BUILD: >
- python -m pip install --upgrade setuptools pip wheel &&
- python -m pip install cmake &&
- cmake -S google_crc32c -B build \
- -DCMAKE_BUILD_TYPE=Release \
- -DCRC32C_BUILD_TESTS=no \
- -DCRC32C_BUILD_BENCHMARKS=no \
- -DBUILD_SHARED_LIBS=yes \
- -DCMAKE_INSTALL_PREFIX:PATH=$(pwd)/usr &&
- make -C build all install
- CIBW_TEST_REQUIRES: pytest
- CIBW_TEST_COMMAND: py.test -v {project}/tests/
-
- - uses: actions/upload-artifact@v3
- with:
- name: python-package-distributions
- path: wheelhouse/
-
- build-wheels-macos:
- name: ${{ matrix.os }}
- strategy:
- matrix:
- os:
- - macos-10.15
- # Wheels port forward
- #- macos-11
- runs-on: ${{ matrix.os }}
- steps:
-
- - uses: actions/checkout@v3
- with:
- submodules: 'recursive'
-
- - name: Get C library hash
- id: get-c-lib-hash
- run:
- echo "::set-output name=hash::$(git -C google_crc32c log -n 1 --pretty=%H)"
-
- - id: load-cache
- name: Load cached C library
- uses: actions/cache@v3
- with:
- path: ${{ github.workspace }}/usr
- key:
- libcrc32c-${{ matrix.os }}-${{ steps.get-c-lib-hash.outputs.hash }}
-
- - name: Build C Library
- if: steps.load-cache.outputs.cache-hit != 'true'
- run: >
- python -m pip install --upgrade setuptools pip wheel &&
- python -m pip install cmake &&
- cmake -S google_crc32c -B build \
- -DCMAKE_BUILD_TYPE=Release \
- -DCMAKE_OSX_DEPLOYMENT_TARGET=10.9 \
- -DCMAKE_OSX_ARCHITECTURES="arm64;x86_64" \
- -DCRC32C_BUILD_TESTS=no \
- -DCRC32C_BUILD_BENCHMARKS=no \
- -DBUILD_SHARED_LIBS=yes \
- -DCMAKE_INSTALL_PREFIX:PATH=${{ github.workspace }}/usr \
- -DCMAKE_INSTALL_NAME_DIR:PATH=${{ github.workspace }}/usr/lib &&
- make -C build all install
-
- - name: Build Wheels
- uses: pypa/cibuildwheel@v2.9.0
- env:
- # For presubmit, just build / test the most common arch
- CIBW_ARCHS: native
- # For presubmit, skip build / test for pypy
- CIBW_SKIP: pp37*
- CIBW_ENVIRONMENT: >
- CRC32C_PURE_PYTHON="0"
- CRC32C_INSTALL_PREFIX="$(pwd)/usr"
- CIBW_BUILD_VERBOSITY: 1
- CIBW_TEST_REQUIRES: pytest
- CIBW_TEST_COMMAND: py.test -v {project}/tests/
-
- - uses: actions/upload-artifact@v3
- with:
- name: python-package-distributions
- path: wheelhouse/
-
- build-wheels-windows:
- name: ${{ matrix.os }} ( ${{ matrix.platform }} )
-
- strategy:
- matrix:
- os:
- - windows-2019
- platform:
- # For presubmit, just build / test the most common platform
- - x64
- #- Win32
-
- runs-on: ${{ matrix.os }}
-
- steps:
-
- - uses: actions/checkout@v3
- with:
- submodules: 'recursive'
-
- - name: Get C library hash
- id: get-c-lib-hash
- run:
- echo "::set-output name=hash::$(git -C google_crc32c log -n 1 --pretty=%H)"
-
- - id: load-cache
- name: Load cached C library
- uses: actions/cache@v3
- with:
- path: ${{ github.workspace }}\usr
- key:
- libcrc32c-${{ matrix.os }}-${{ matrix.platform }}-${{ steps.get-c-lib-hash.outputs.hash }}
-
- - name: Build C Library
- if: steps.load-cache.outputs.cache-hit != 'true'
- run: |
- echo "::group::Install cmake"
- python -m pip install --upgrade setuptools pip wheel
- python -m pip install cmake
- echo "::endgroup::"
- echo "::group::Run cmake to initialze build tree"
- cmake -S google_crc32c -B build -G "Visual Studio 16 2019" -A ${{ matrix.platform }} -DCRC32C_BUILD_BENCHMARKS=no -DCRC32C_BUILD_TESTS=no -DBUILD_SHARED_LIBS=no -DCMAKE_WINDOWS_EXPORT_ALL_SYMBOLS=yes -DCRC32C_USE_GLOG=0
- echo "::endgroup::"
- echo "::group::Run cmake to build library"
- cmake --build build --verbose --config Release
- echo "::endgroup::"
- echo "::group::Run cmake to install library"
- cmake --install build --verbose --config Release --prefix=${{ github.workspace }}\usr\
- echo "::endgroup::"
-
- # Passing through ${{ github.workspace }} to CIBW_ENVIRONMENT mangles
- # backslashes: compute a variant which uses only forward-slashses.
- - id: crc32c-install-prefix
- name: Dead-reckon a CIBW-compatible install prefix
- shell: bash
- run: |
- python -c "import os; workspace = '/'.join(os.getenv('GITHUB_WORKSPACE').split(os.sep)); pfx = f'{workspace}/usr'; print(f'::set-output name=prefix::{pfx}')"
-
- - id: platform-arch
- name: Map platform -> wheel arch
- shell: bash
- run: |
- if [[ "${{ matrix.platform }}" == "Win32" ]]; then
- echo "::set-output name=arch::x86"
- else
- echo "::set-output name=arch::AMD64"
- fi
-
- - name: Build Wheels
- uses: pypa/cibuildwheel@v2.9.0
- env:
- CIBW_ARCHS_WINDOWS: ${{ steps.platform-arch.outputs.arch }}
- # For presubmit, skip build / test for pypy
- CIBW_SKIP: pp37*
- CIBW_ENVIRONMENT: CRC32C_PURE_PYTHON="0" CRC32C_INSTALL_PREFIX="${{ steps.crc32c-install-prefix.outputs.prefix }}"
- CIBW_BUILD_VERBOSITY: 3
- CIBW_TEST_REQUIRES: pytest
- CIBW_TEST_COMMAND: py.test -v {project}/tests
-
- - uses: actions/upload-artifact@v3
- with:
- name: python-package-distributions
- path: wheelhouse/
diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml
deleted file mode 100644
index bb5c3e78..00000000
--- a/.github/workflows/python-publish.yml
+++ /dev/null
@@ -1,298 +0,0 @@
-
-name: Build binary wheels and upload to PyPI
-
-on:
- release:
- types: [created]
- schedule:
- # Build nightly at 05:43 UTC
- - cron: '43 5 * * *'
- workflow_dispatch: # allow running manually from 'Actions' tab
-
-jobs:
- build-source-distribution:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v3
- - name: Set up Python
- uses: actions/setup-python@v4
- - name: Build
- run: python setup.py sdist
- - uses: actions/upload-artifact@v3
- with:
- name: python-package-distributions
- path: dist/
-
- build-wheels-manylinux1:
- # Note: the 'manylinux1' image will no longer be supported by PyPA
- # after 2022-01-01.
- # See: https://github.com/pypa/manylinux/issues/994
- name: Build wheels on ubuntu-20.04 - x86_64 - manylinux1
-
- runs-on: ubuntu-20.04
-
- steps:
- - uses: actions/checkout@v3
- with:
- submodules: 'recursive'
-
- - name: Build Wheels
- uses: pypa/cibuildwheel@v2.9.0
- env:
- CIBW_ARCHS_LINUX: x86_64
- CIBW_MANYLINUX_X86_64_IMAGE: manylinux1
- CIBW_BUILD: "cp3[6-9]-manylinux*" # manylinux1 has no support for 3.10+ Python or PyPy
- CIBW_ENVIRONMENT: >
- CRC32C_PURE_PYTHON="0"
- CRC32C_INSTALL_PREFIX="$(pwd)/usr"
- CIBW_BUILD_VERBOSITY: 1
- # Build the C library inside CIBW so that the manylinux image is
- # used to link the share library; otherwise, our extension wouldn't
- # be able to link with it.
- CIBW_BEFORE_BUILD: >
- python -m pip install --upgrade setuptools pip wheel &&
- python -m pip install --only-binary cmake cmake &&
- cmake -S google_crc32c -B build \
- -DCMAKE_BUILD_TYPE=Release \
- -DCRC32C_BUILD_TESTS=no \
- -DCRC32C_BUILD_BENCHMARKS=no \
- -DBUILD_SHARED_LIBS=yes \
- -DCMAKE_INSTALL_PREFIX:PATH=$(pwd)/usr &&
- make -C build all install
- CIBW_TEST_REQUIRES: pytest
- CIBW_TEST_COMMAND: py.test -v {project}/tests/
-
- - uses: actions/upload-artifact@v3
- with:
- name: python-package-distributions
- path: wheelhouse/
-
- build-wheels-linux:
- name: Build wheels on ${{ matrix.os }} - ${{ matrix.arch }}
-
- strategy:
- matrix:
- os:
- - ubuntu-20.04
- arch:
- - x86_64
- - i686
- - aarch64
-
- runs-on: ${{ matrix.os }}
-
- steps:
- - uses: actions/checkout@v3
- with:
- submodules: 'recursive'
-
- - name: Set up QEMU for ARM64 cross compile
- if: ${{ matrix.arch }} == 'aarch64'
- id: qemu
- uses: docker/setup-qemu-action@v2
-
- - name: Build Wheels
- uses: pypa/cibuildwheel@v2.9.0
- env:
- CIBW_ARCHS_LINUX: ${{ matrix.arch }}
- CIBW_ENVIRONMENT: >
- CRC32C_PURE_PYTHON="0"
- CRC32C_INSTALL_PREFIX="$(pwd)/usr"
- CIBW_BUILD_VERBOSITY: 1
- # Build the C library inside CIBW so that the manylinux image is
- # used to link the share library; otherwise, our extension wouldn't
- # be able to link with it.
- CIBW_BEFORE_BUILD: >
- python -m pip install --upgrade setuptools pip wheel &&
- python -m pip install cmake &&
- cmake -S google_crc32c -B build \
- -DCMAKE_BUILD_TYPE=Release \
- -DCRC32C_BUILD_TESTS=no \
- -DCRC32C_BUILD_BENCHMARKS=no \
- -DBUILD_SHARED_LIBS=yes \
- -DCMAKE_INSTALL_PREFIX:PATH=$(pwd)/usr &&
- make -C build all install
- CIBW_TEST_REQUIRES: pytest
- CIBW_TEST_COMMAND: py.test -v {project}/tests/
-
- - uses: actions/upload-artifact@v3
- with:
- name: python-package-distributions
- path: wheelhouse/
-
- build-wheels-macos:
- name: Build wheels on ${{ matrix.os }} - ${{ matrix.arch }}
- strategy:
- matrix:
- os:
- - macos-10.15
- #- macos-11
- arch:
- - x86_64
- - universal2
- runs-on: ${{ matrix.os }}
- steps:
-
- - uses: actions/checkout@v3
- with:
- submodules: 'recursive'
-
- - name: Get C library hash
- id: get-c-lib-hash
- run:
- echo "::set-output name=hash::$(git -C google_crc32c log -n 1 --pretty=%H)"
-
- - id: load-cache
- name: Load cached C library
- uses: actions/cache@v3
- with:
- path: ${{ github.workspace }}/usr
- key:
- libcrc32c-${{ matrix.os }}-${{ steps.get-c-lib-hash.outputs.hash }}
-
- - name: Build C Library
- if: steps.load-cache.outputs.cache-hit != 'true'
- run: >
- python -m pip install --upgrade setuptools pip wheel &&
- python -m pip install cmake &&
- cmake -S google_crc32c -B build \
- -DCMAKE_BUILD_TYPE=Release \
- -DCMAKE_OSX_DEPLOYMENT_TARGET=10.9 \
- -DCMAKE_OSX_ARCHITECTURES="arm64;x86_64" \
- -DCRC32C_BUILD_TESTS=no \
- -DCRC32C_BUILD_BENCHMARKS=no \
- -DBUILD_SHARED_LIBS=yes \
- -DCMAKE_INSTALL_PREFIX:PATH=${{ github.workspace }}/usr \
- -DCMAKE_INSTALL_NAME_DIR:PATH=${{ github.workspace }}/usr/lib &&
- make -C build all install
-
- - name: Build Wheels
- uses: pypa/cibuildwheel@v2.9.0
- env:
- CIBW_ARCHS_MACOS: ${{ matrix.arch }}
- CIBW_ENVIRONMENT: >
- CRC32C_PURE_PYTHON="0"
- CRC32C_INSTALL_PREFIX="$(pwd)/usr"
- CIBW_BUILD_VERBOSITY: 1
- CIBW_TEST_REQUIRES: pytest
- CIBW_TEST_COMMAND: py.test -v {project}/tests/
-
- - uses: actions/upload-artifact@v3
- with:
- name: python-package-distributions
- path: wheelhouse/
-
- build-wheels-windows:
- name: Build wheels on ${{ matrix.os }} ( ${{ matrix.platform }} )
-
- strategy:
- matrix:
- os:
- - windows-2019
- platform:
- - x64
- - Win32
-
- runs-on: ${{ matrix.os }}
-
- steps:
-
- - uses: actions/checkout@v3
- with:
- submodules: 'recursive'
-
- - name: Get C library hash
- id: get-c-lib-hash
- run:
- echo "::set-output name=hash::$(git -C google_crc32c log -n 1 --pretty=%H)"
-
- - id: load-cache
- name: Load cached C library
- uses: actions/cache@v3
- with:
- path: ${{ github.workspace }}\usr
- key:
- libcrc32c-${{ matrix.os }}-${{ matrix.platform }}-${{ steps.get-c-lib-hash.outputs.hash }}
-
- - name: Build C Library
- if: steps.load-cache.outputs.cache-hit != 'true'
- run: |
- python -m pip install --upgrade setuptools pip wheel
- python -m pip install cmake
- cmake -S google_crc32c -B build -G "Visual Studio 16 2019" -A ${{ matrix.platform }} -DCRC32C_BUILD_BENCHMARKS=no -DCRC32C_BUILD_TESTS=no -DBUILD_SHARED_LIBS=no -DCMAKE_WINDOWS_EXPORT_ALL_SYMBOLS=yes -DCRC32C_USE_GLOG=0
- cmake --build build --verbose --config Release
- cmake --install build --verbose --config Release --prefix=${{ github.workspace }}\usr\
-
- # Passing through ${{ github.workspace }} to CIBW_ENVIRONMENT mangles
- # backslashes: compute a variant which uses only forward-slashses.
- - id: crc32c-install-prefix
- name: Dead-reckon a CIBW-compatible install prefix
- shell: bash
- run: |
- python -c "import os; workspace = '/'.join(os.getenv('GITHUB_WORKSPACE').split(os.sep)); pfx = f'{workspace}/usr'; print(f'::set-output name=prefix::{pfx}')"
-
- - id: platform-arch
- name: Map platform -> wheel arch
- shell: bash
- run: |
- if [[ "${{ matrix.platform }}" == "Win32" ]]; then
- echo "::set-output name=arch::x86"
- else
- echo "::set-output name=arch::AMD64"
- fi
-
- - name: Build Wheels
- uses: pypa/cibuildwheel@v2.9.0
- env:
- CIBW_ARCHS_WINDOWS: ${{ steps.platform-arch.outputs.arch }}
- CIBW_ENVIRONMENT: >
- CRC32C_INSTALL_PREFIX="${{ steps.crc32c-install-prefix.outputs.prefix }}"
- CRC32C_PURE_PYTHON="0"
- CIBW_BUILD_VERBOSITY: 1
- CIBW_TEST_REQUIRES: pytest
- CIBW_TEST_COMMAND: py.test -v {project}/tests
-
- - uses: actions/upload-artifact@v3
- with:
- name: python-package-distributions
- path: wheelhouse/
-
-
- publish:
- if: github.event_name == 'release'
- needs:
- - build-source-distribution
- - build-wheels-manylinux1
- - build-wheels-linux
- - build-wheels-macos
- - build-wheels-windows
- runs-on: ubuntu-latest
- steps:
- - name: Download all the dists
- uses: actions/download-artifact@v3
- with:
- name: python-package-distributions
- path: dist/
- - name: Download all the wheels
- uses: actions/download-artifact@v3
- with:
- name: python-package-distributions
- path: wheelhouse/
- - name: What will we publish?
- run: ls wheelhouse/
- - name: Publish Source Distribution
- uses: pypa/gh-action-pypi-publish@master
- with:
- user: __token__
- password: ${{ secrets.PYPI_API_TOKEN }}
- packages_dir: dist/
- skip_existing: true
- # repository_url: https://test.pypi.org/legacy/
- - name: Publish Wheels
- uses: pypa/gh-action-pypi-publish@master
- with:
- user: __token__
- password: ${{ secrets.PYPI_API_TOKEN }}
- packages_dir: wheelhouse/
- skip_existing: true
- # repository_url: https://test.pypi.org/legacy/
diff --git a/.kokoro/build-manylinux.sh b/.kokoro/build-manylinux.sh
new file mode 100755
index 00000000..90160c4d
--- /dev/null
+++ b/.kokoro/build-manylinux.sh
@@ -0,0 +1,30 @@
+#!/bin/bash
+
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+set -eo pipefail
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+cd github/python-crc32c
+
+# Before running nox and such, build the extension.
+./scripts/manylinux/build.sh
+./scripts/manylinux/check.sh
diff --git a/.kokoro/build-osx.sh b/.kokoro/build-osx.sh
new file mode 100755
index 00000000..4e04cdb6
--- /dev/null
+++ b/.kokoro/build-osx.sh
@@ -0,0 +1,29 @@
+#!/bin/bash
+
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+cd github/python-crc32c
+
+# Before running nox and such, build the extension.
+./scripts/osx/build.sh
+./scripts/osx/check.sh
diff --git a/.kokoro/continuous/common.cfg b/.kokoro/continuous/common.cfg
new file mode 100644
index 00000000..bf66b2f3
--- /dev/null
+++ b/.kokoro/continuous/common.cfg
@@ -0,0 +1,27 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python"
+
+# Use the trampoline script to run in docker.
+build_file: "python-crc32c/.kokoro/trampoline.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/build.sh"
+}
diff --git a/.kokoro/continuous/continuous-linux.cfg b/.kokoro/continuous/continuous-linux.cfg
new file mode 100644
index 00000000..c0f213b7
--- /dev/null
+++ b/.kokoro/continuous/continuous-linux.cfg
@@ -0,0 +1,8 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Tell the trampoline which build file to use.
+env_vars: {
+ key: "PACKAGE"
+ value: "crc32c"
+}
+build_file: "python-crc32c/.kokoro/build-manylinux.sh"
diff --git a/.kokoro/continuous/continuous-osx-slcn.cfg b/.kokoro/continuous/continuous-osx-slcn.cfg
new file mode 100644
index 00000000..81c893d2
--- /dev/null
+++ b/.kokoro/continuous/continuous-osx-slcn.cfg
@@ -0,0 +1,9 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Tell the trampoline which build file to use.
+env_vars: {
+ key: "PACKAGE"
+ value: "crc32c"
+}
+
+build_file: "python-crc32c/.kokoro/build-osx.sh"
diff --git a/.kokoro/continuous/continuous-osx.cfg b/.kokoro/continuous/continuous-osx.cfg
new file mode 100644
index 00000000..81c893d2
--- /dev/null
+++ b/.kokoro/continuous/continuous-osx.cfg
@@ -0,0 +1,9 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Tell the trampoline which build file to use.
+env_vars: {
+ key: "PACKAGE"
+ value: "crc32c"
+}
+
+build_file: "python-crc32c/.kokoro/build-osx.sh"
diff --git a/.kokoro/continuous/continuous-windows.cfg b/.kokoro/continuous/continuous-windows.cfg
new file mode 100644
index 00000000..8db3dd41
--- /dev/null
+++ b/.kokoro/continuous/continuous-windows.cfg
@@ -0,0 +1,9 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Tell the trampoline which build file to use.
+env_vars: {
+ key: "PACKAGE"
+ value: "crc32c"
+}
+
+build_file: "python-crc32c/.kokoro/presubmit.bat"
diff --git a/.kokoro/continuous/continuous.cfg b/.kokoro/continuous/continuous.cfg
new file mode 100644
index 00000000..8f43917d
--- /dev/null
+++ b/.kokoro/continuous/continuous.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/continuous/prerelease-deps.cfg b/.kokoro/continuous/prerelease-deps.cfg
new file mode 100644
index 00000000..3595fb43
--- /dev/null
+++ b/.kokoro/continuous/prerelease-deps.cfg
@@ -0,0 +1,7 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Only run this nox session.
+env_vars: {
+ key: "NOX_SESSION"
+ value: "prerelease_deps"
+}
diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile
new file mode 100644
index 00000000..e5410e29
--- /dev/null
+++ b/.kokoro/docker/docs/Dockerfile
@@ -0,0 +1,89 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ubuntu:24.04
+
+ENV DEBIAN_FRONTEND noninteractive
+
+# Ensure local Python is preferred over distribution Python.
+ENV PATH /usr/local/bin:$PATH
+
+# Install dependencies.
+RUN apt-get update \
+ && apt-get install -y --no-install-recommends \
+ apt-transport-https \
+ build-essential \
+ ca-certificates \
+ curl \
+ dirmngr \
+ git \
+ gpg-agent \
+ graphviz \
+ libbz2-dev \
+ libdb5.3-dev \
+ libexpat1-dev \
+ libffi-dev \
+ liblzma-dev \
+ libreadline-dev \
+ libsnappy-dev \
+ libssl-dev \
+ libsqlite3-dev \
+ portaudio19-dev \
+ redis-server \
+ software-properties-common \
+ ssh \
+ sudo \
+ tcl \
+ tcl-dev \
+ tk \
+ tk-dev \
+ uuid-dev \
+ wget \
+ zlib1g-dev \
+ && add-apt-repository universe \
+ && apt-get update \
+ && apt-get -y install jq \
+ && apt-get clean autoclean \
+ && apt-get autoremove -y \
+ && rm -rf /var/lib/apt/lists/* \
+ && rm -f /var/cache/apt/archives/*.deb
+
+
+###################### Install python 3.10.14 for docs/docfx session
+
+# Download python 3.10.14
+RUN wget https://www.python.org/ftp/python/3.10.14/Python-3.10.14.tgz
+
+# Extract files
+RUN tar -xvf Python-3.10.14.tgz
+
+# Install python 3.10.14
+RUN ./Python-3.10.14/configure --enable-optimizations
+RUN make altinstall
+
+ENV PATH /usr/local/bin/python3.10:$PATH
+
+###################### Install pip
+RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \
+ && python3.10 /tmp/get-pip.py \
+ && rm /tmp/get-pip.py
+
+# Test pip
+RUN python3.10 -m pip
+
+# Install build requirements
+COPY requirements.txt /requirements.txt
+RUN python3.10 -m pip install --require-hashes -r requirements.txt
+
+CMD ["python3.10"]
diff --git a/.kokoro/docker/docs/requirements.in b/.kokoro/docker/docs/requirements.in
new file mode 100644
index 00000000..816817c6
--- /dev/null
+++ b/.kokoro/docker/docs/requirements.in
@@ -0,0 +1 @@
+nox
diff --git a/.kokoro/docker/docs/requirements.txt b/.kokoro/docker/docs/requirements.txt
new file mode 100644
index 00000000..7129c771
--- /dev/null
+++ b/.kokoro/docker/docs/requirements.txt
@@ -0,0 +1,42 @@
+#
+# This file is autogenerated by pip-compile with Python 3.9
+# by the following command:
+#
+# pip-compile --allow-unsafe --generate-hashes requirements.in
+#
+argcomplete==3.4.0 \
+ --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \
+ --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f
+ # via nox
+colorlog==6.8.2 \
+ --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \
+ --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33
+ # via nox
+distlib==0.3.8 \
+ --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \
+ --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64
+ # via virtualenv
+filelock==3.15.4 \
+ --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \
+ --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7
+ # via virtualenv
+nox==2024.4.15 \
+ --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \
+ --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f
+ # via -r requirements.in
+packaging==24.1 \
+ --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \
+ --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124
+ # via nox
+platformdirs==4.2.2 \
+ --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \
+ --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3
+ # via virtualenv
+tomli==2.0.1 \
+ --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \
+ --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f
+ # via nox
+virtualenv==20.26.3 \
+ --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \
+ --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589
+ # via nox
diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg
new file mode 100644
index 00000000..479f3a32
--- /dev/null
+++ b/.kokoro/docs/common.cfg
@@ -0,0 +1,67 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-crc32c/.kokoro/trampoline_v2.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/publish-docs.sh"
+}
+
+env_vars: {
+ key: "STAGING_BUCKET"
+ value: "docs-staging"
+}
+
+env_vars: {
+ key: "V2_STAGING_BUCKET"
+ # Push non-cloud library docs to `docs-staging-v2-staging` instead of the
+ # Cloud RAD bucket `docs-staging-v2`
+ value: "docs-staging-v2-staging"
+}
+
+# It will upload the docker image after successful builds.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE_UPLOAD"
+ value: "true"
+}
+
+# It will always build the docker image.
+env_vars: {
+ key: "TRAMPOLINE_DOCKERFILE"
+ value: ".kokoro/docker/docs/Dockerfile"
+}
+
+# Fetch the token needed for reporting release status to GitHub
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "yoshi-automation-github-key"
+ }
+ }
+}
+
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "docuploader_service_account"
+ }
+ }
+}
\ No newline at end of file
diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg
new file mode 100644
index 00000000..a1e06123
--- /dev/null
+++ b/.kokoro/docs/docs-presubmit.cfg
@@ -0,0 +1,28 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "STAGING_BUCKET"
+ value: "gcloud-python-test"
+}
+
+env_vars: {
+ key: "V2_STAGING_BUCKET"
+ value: "gcloud-python-test"
+}
+
+# We only upload the image in the main `docs` build.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE_UPLOAD"
+ value: "false"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/build.sh"
+}
+
+# Only run this nox session.
+env_vars: {
+ key: "NOX_SESSION"
+ value: "docs docfx"
+}
diff --git a/.kokoro/docs/docs.cfg b/.kokoro/docs/docs.cfg
new file mode 100644
index 00000000..8f43917d
--- /dev/null
+++ b/.kokoro/docs/docs.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/noxfile.py b/.kokoro/noxfile.py
new file mode 100644
index 00000000..7c8a6399
--- /dev/null
+++ b/.kokoro/noxfile.py
@@ -0,0 +1,292 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import glob
+import os
+from pathlib import Path
+import sys
+from typing import Callable, Dict, Optional
+
+import nox
+
+
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# DO NOT EDIT THIS FILE EVER!
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# WARNING - WARNING - WARNING - WARNING - WARNING
+
+BLACK_VERSION = "black==22.3.0"
+ISORT_VERSION = "isort==5.10.1"
+
+# Copy `noxfile_config.py` to your directory and modify it instead.
+
+# `TEST_CONFIG` dict is a configuration hook that allows users to
+# modify the test configurations. The values here should be in sync
+# with `noxfile_config.py`. Users will copy `noxfile_config.py` into
+# their directory and modify it.
+
+TEST_CONFIG = {
+ # You can opt out from the test for specific Python versions.
+ "ignored_versions": [],
+ # Old samples are opted out of enforcing Python type hints
+ # All new samples should feature them
+ "enforce_type_hints": False,
+ # An envvar key for determining the project id to use. Change it
+ # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
+ # build specific Cloud project. You can also use your own string
+ # to use your own Cloud project.
+ "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
+ # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
+ # If you need to use a specific version of pip,
+ # change pip_version_override to the string representation
+ # of the version number, for example, "20.2.4"
+ "pip_version_override": None,
+ # A dictionary you want to inject into your test. Don't put any
+ # secrets here. These values will override predefined values.
+ "envs": {},
+}
+
+
+try:
+ # Ensure we can import noxfile_config in the project's directory.
+ sys.path.append(".")
+ from noxfile_config import TEST_CONFIG_OVERRIDE
+except ImportError as e:
+ print("No user noxfile_config found: detail: {}".format(e))
+ TEST_CONFIG_OVERRIDE = {}
+
+# Update the TEST_CONFIG with the user supplied values.
+TEST_CONFIG.update(TEST_CONFIG_OVERRIDE)
+
+
+def get_pytest_env_vars() -> Dict[str, str]:
+ """Returns a dict for pytest invocation."""
+ ret = {}
+
+ # Override the GCLOUD_PROJECT and the alias.
+ env_key = TEST_CONFIG["gcloud_project_env"]
+ # This should error out if not set.
+ ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key]
+
+ # Apply user supplied envs.
+ ret.update(TEST_CONFIG["envs"])
+ return ret
+
+
+# DO NOT EDIT - automatically generated.
+# All versions used to test samples.
+ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"]
+
+# Any default versions that should be ignored.
+IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]
+
+TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS])
+
+INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in (
+ "True",
+ "true",
+)
+
+# Error if a python version is missing
+nox.options.error_on_missing_interpreters = True
+
+#
+# Style Checks
+#
+
+
+# Linting with flake8.
+#
+# We ignore the following rules:
+# E203: whitespace before ‘:’
+# E266: too many leading ‘#’ for block comment
+# E501: line too long
+# I202: Additional newline in a section of imports
+#
+# We also need to specify the rules which are ignored by default:
+# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121']
+FLAKE8_COMMON_ARGS = [
+ "--show-source",
+ "--builtin=gettext",
+ "--max-complexity=20",
+ "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py",
+ "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202",
+ "--max-line-length=88",
+]
+
+
+@nox.session
+def lint(session: nox.sessions.Session) -> None:
+ if not TEST_CONFIG["enforce_type_hints"]:
+ session.install("flake8")
+ else:
+ session.install("flake8", "flake8-annotations")
+
+ args = FLAKE8_COMMON_ARGS + [
+ ".",
+ ]
+ session.run("flake8", *args)
+
+
+#
+# Black
+#
+
+
+@nox.session
+def blacken(session: nox.sessions.Session) -> None:
+ """Run black. Format code to uniform standard."""
+ session.install(BLACK_VERSION)
+ python_files = [path for path in os.listdir(".") if path.endswith(".py")]
+
+ session.run("black", *python_files)
+
+
+#
+# format = isort + black
+#
+
+@nox.session
+def format(session: nox.sessions.Session) -> None:
+ """
+ Run isort to sort imports. Then run black
+ to format code to uniform standard.
+ """
+ session.install(BLACK_VERSION, ISORT_VERSION)
+ python_files = [path for path in os.listdir(".") if path.endswith(".py")]
+
+ # Use the --fss option to sort imports using strict alphabetical order.
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections
+ session.run("isort", "--fss", *python_files)
+ session.run("black", *python_files)
+
+
+#
+# Sample Tests
+#
+
+
+PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"]
+
+
+def _session_tests(
+ session: nox.sessions.Session, post_install: Callable = None
+) -> None:
+ # check for presence of tests
+ test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob("**/test_*.py", recursive=True)
+ test_list.extend(glob.glob("**/tests", recursive=True))
+
+ if len(test_list) == 0:
+ print("No tests found, skipping directory.")
+ return
+
+ if TEST_CONFIG["pip_version_override"]:
+ pip_version = TEST_CONFIG["pip_version_override"]
+ session.install(f"pip=={pip_version}")
+ """Runs py.test for a particular project."""
+ concurrent_args = []
+ if os.path.exists("requirements.txt"):
+ if os.path.exists("constraints.txt"):
+ session.install("-r", "requirements.txt", "-c", "constraints.txt")
+ else:
+ session.install("-r", "requirements.txt")
+ with open("requirements.txt") as rfile:
+ packages = rfile.read()
+
+ if os.path.exists("requirements-test.txt"):
+ if os.path.exists("constraints-test.txt"):
+ session.install(
+ "-r", "requirements-test.txt", "-c", "constraints-test.txt"
+ )
+ else:
+ session.install("-r", "requirements-test.txt")
+ with open("requirements-test.txt") as rtfile:
+ packages += rtfile.read()
+
+ if INSTALL_LIBRARY_FROM_SOURCE:
+ session.install("-e", _get_repo_root())
+
+ if post_install:
+ post_install(session)
+
+ if "pytest-parallel" in packages:
+ concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto'])
+ elif "pytest-xdist" in packages:
+ concurrent_args.extend(['-n', 'auto'])
+
+ session.run(
+ "pytest",
+ *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args),
+ # Pytest will return 5 when no tests are collected. This can happen
+ # on travis where slow and flaky tests are excluded.
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html
+ success_codes=[0, 5],
+ env=get_pytest_env_vars(),
+ )
+
+
+@nox.session(python=ALL_VERSIONS)
+def py(session: nox.sessions.Session) -> None:
+ """Runs py.test for a sample using the specified version of Python."""
+ if session.python in TESTED_VERSIONS:
+ _session_tests(session)
+ else:
+ session.skip(
+ "SKIPPED: {} tests are disabled for this sample.".format(session.python)
+ )
+
+
+#
+# Readmegen
+#
+
+
+def _get_repo_root() -> Optional[str]:
+ """ Returns the root folder of the project. """
+ # Get root of this repository. Assume we don't have directories nested deeper than 10 items.
+ p = Path(os.getcwd())
+ for i in range(10):
+ if p is None:
+ break
+ if Path(p / ".git").exists():
+ return str(p)
+ # .git is not available in repos cloned via Cloud Build
+ # setup.py is always in the library's root, so use that instead
+ # https://github.com/googleapis/synthtool/issues/792
+ if Path(p / "setup.py").exists():
+ return str(p)
+ p = p.parent
+ raise Exception("Unable to detect repository root.")
+
+
+GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")])
+
+
+@nox.session
+@nox.parametrize("path", GENERATED_READMES)
+def readmegen(session: nox.sessions.Session, path: str) -> None:
+ """(Re-)generates the readme for a sample."""
+ session.install("jinja2", "pyyaml")
+ dir_ = os.path.dirname(path)
+
+ if os.path.exists(os.path.join(dir_, "requirements.txt")):
+ session.install("-r", os.path.join(dir_, "requirements.txt"))
+
+ in_file = os.path.join(dir_, "README.rst.in")
+ session.run(
+ "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file
+ )
diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh
new file mode 100755
index 00000000..c435402f
--- /dev/null
+++ b/.kokoro/populate-secrets.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+# Copyright 2024 Google LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;}
+function msg { println "$*" >&2 ;}
+function println { printf '%s\n' "$(now) $*" ;}
+
+
+# Populates requested secrets set in SECRET_MANAGER_KEYS from service account:
+# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com
+SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager"
+msg "Creating folder on disk for secrets: ${SECRET_LOCATION}"
+mkdir -p ${SECRET_LOCATION}
+for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g")
+do
+ msg "Retrieving secret ${key}"
+ docker run --entrypoint=gcloud \
+ --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \
+ gcr.io/google.com/cloudsdktool/cloud-sdk \
+ secrets versions access latest \
+ --project cloud-devrel-kokoro-resources \
+ --secret ${key} > \
+ "${SECRET_LOCATION}/${key}"
+ if [[ $? == 0 ]]; then
+ msg "Secret written to ${SECRET_LOCATION}/${key}"
+ else
+ msg "Error retrieving secret ${key}"
+ fi
+done
diff --git a/.kokoro/presubmit.bat b/.kokoro/presubmit.bat
new file mode 100644
index 00000000..71c6511d
--- /dev/null
+++ b/.kokoro/presubmit.bat
@@ -0,0 +1,32 @@
+@rem Copyright 2019 Google LLC. All rights reserved.
+@rem
+@rem Licensed under the Apache License, Version 2.0 (the "License");
+@rem you may not use this file except in compliance with the License.
+@rem You may obtain a copy of the License at
+@rem
+@rem http://www.apache.org/licenses/LICENSE-2.0
+@rem
+@rem Unless required by applicable law or agreed to in writing, software
+@rem distributed under the License is distributed on an "AS IS" BASIS,
+@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+@rem See the License for the specific language governing permissions and
+@rem limitations under the License.
+
+@echo "Starting Windows build"
+
+cd /d %~dp0
+cd ..
+
+@rem as this package uses submodules make sure we have all content
+call git config --global --add safe.directory C:/tmpfs/src/github/python-crc32c
+call git submodule update --recursive || goto :error
+
+@echo "Build Wheels and Run Tests"
+call scripts\windows\build.bat || goto :error
+
+for /r %%a in (*.whl) do xcopy "%%a" %KOKORO_ARTIFACTS_DIR% /i
+
+goto :EOF
+
+:error
+exit /b 1
diff --git a/.kokoro/presubmit/common.cfg b/.kokoro/presubmit/common.cfg
new file mode 100644
index 00000000..bf66b2f3
--- /dev/null
+++ b/.kokoro/presubmit/common.cfg
@@ -0,0 +1,27 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python"
+
+# Use the trampoline script to run in docker.
+build_file: "python-crc32c/.kokoro/trampoline.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/build.sh"
+}
diff --git a/.kokoro/presubmit/linux/common.cfg b/.kokoro/presubmit/linux/common.cfg
new file mode 100644
index 00000000..bf66b2f3
--- /dev/null
+++ b/.kokoro/presubmit/linux/common.cfg
@@ -0,0 +1,27 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python"
+
+# Use the trampoline script to run in docker.
+build_file: "python-crc32c/.kokoro/trampoline.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/build.sh"
+}
diff --git a/.kokoro/presubmit/linux/presubmit-linux.cfg b/.kokoro/presubmit/linux/presubmit-linux.cfg
new file mode 100644
index 00000000..c0f213b7
--- /dev/null
+++ b/.kokoro/presubmit/linux/presubmit-linux.cfg
@@ -0,0 +1,8 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Tell the trampoline which build file to use.
+env_vars: {
+ key: "PACKAGE"
+ value: "crc32c"
+}
+build_file: "python-crc32c/.kokoro/build-manylinux.sh"
diff --git a/.kokoro/presubmit/osx/common.cfg b/.kokoro/presubmit/osx/common.cfg
new file mode 100644
index 00000000..bf66b2f3
--- /dev/null
+++ b/.kokoro/presubmit/osx/common.cfg
@@ -0,0 +1,27 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python"
+
+# Use the trampoline script to run in docker.
+build_file: "python-crc32c/.kokoro/trampoline.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/build.sh"
+}
diff --git a/.kokoro/presubmit/osx/presubmit-osx-slcn.cfg b/.kokoro/presubmit/osx/presubmit-osx-slcn.cfg
new file mode 100644
index 00000000..81c893d2
--- /dev/null
+++ b/.kokoro/presubmit/osx/presubmit-osx-slcn.cfg
@@ -0,0 +1,9 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Tell the trampoline which build file to use.
+env_vars: {
+ key: "PACKAGE"
+ value: "crc32c"
+}
+
+build_file: "python-crc32c/.kokoro/build-osx.sh"
diff --git a/.kokoro/presubmit/osx/presubmit-osx.cfg b/.kokoro/presubmit/osx/presubmit-osx.cfg
new file mode 100644
index 00000000..81c893d2
--- /dev/null
+++ b/.kokoro/presubmit/osx/presubmit-osx.cfg
@@ -0,0 +1,9 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Tell the trampoline which build file to use.
+env_vars: {
+ key: "PACKAGE"
+ value: "crc32c"
+}
+
+build_file: "python-crc32c/.kokoro/build-osx.sh"
diff --git a/.kokoro/presubmit/prerelease-deps.cfg b/.kokoro/presubmit/prerelease-deps.cfg
new file mode 100644
index 00000000..3595fb43
--- /dev/null
+++ b/.kokoro/presubmit/prerelease-deps.cfg
@@ -0,0 +1,7 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Only run this nox session.
+env_vars: {
+ key: "NOX_SESSION"
+ value: "prerelease_deps"
+}
diff --git a/.kokoro/presubmit/presubmit.cfg b/.kokoro/presubmit/presubmit.cfg
new file mode 100644
index 00000000..8f43917d
--- /dev/null
+++ b/.kokoro/presubmit/presubmit.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/presubmit/windows/common.cfg b/.kokoro/presubmit/windows/common.cfg
new file mode 100644
index 00000000..44a039a9
--- /dev/null
+++ b/.kokoro/presubmit/windows/common.cfg
@@ -0,0 +1,27 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python"
+
+# Use the Windows presubmit batch script.
+build_file: "python-crc32c/.kokoro/presubmit.bat"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/build.sh"
+}
diff --git a/.kokoro/presubmit/windows/presubmit-windows.cfg b/.kokoro/presubmit/windows/presubmit-windows.cfg
new file mode 100644
index 00000000..8db3dd41
--- /dev/null
+++ b/.kokoro/presubmit/windows/presubmit-windows.cfg
@@ -0,0 +1,9 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Tell the trampoline which build file to use.
+env_vars: {
+ key: "PACKAGE"
+ value: "crc32c"
+}
+
+build_file: "python-crc32c/.kokoro/presubmit.bat"
diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh
new file mode 100755
index 00000000..233205d5
--- /dev/null
+++ b/.kokoro/publish-docs.sh
@@ -0,0 +1,63 @@
+#!/bin/bash
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
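+# Make sure tools installed into the user-level Python bin directory (e.g. nox) are on PATH.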
+export PATH="${HOME}/.local/bin:${PATH}"
+
+# Install the pinned doc tooling (nox, gcp-docuploader, etc.) from the hashed requirements file
+python3.10 -m pip install --require-hashes -r .kokoro/requirements.txt
+python3.10 -m nox --version
+
+# build docs
+nox -s docs
+
+# create metadata
+python3.10 -m docuploader create-metadata \
+ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
+ --version=$(python3.10 setup.py --version) \
+ --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
+ --distribution-name=$(python3.10 setup.py --name) \
+ --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
+ --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
+ --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
+
+cat docs.metadata
+
+# upload docs
+python3.10 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}"
+
+
+# Build the DocFX YAML output, then repeat the metadata/upload steps for it (staged to the V2 bucket under the "docfx" prefix)
+nox -s docfx
+
+# create metadata.
+python3.10 -m docuploader create-metadata \
+ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
+ --version=$(python3.10 setup.py --version) \
+ --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
+ --distribution-name=$(python3.10 setup.py --name) \
+ --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
+ --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
+ --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
+
+cat docs.metadata
+
+# upload docs
+python3.10 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}"
diff --git a/.kokoro/release-manylinux-wheel.sh b/.kokoro/release-manylinux-wheel.sh
new file mode 100755
index 00000000..9a3d913d
--- /dev/null
+++ b/.kokoro/release-manylinux-wheel.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+set -eo pipefail
+
+cd github/python-crc32c
+
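+# Ask the build script below to publish the wheels it builds, not just build them.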
+export PUBLISH_WHEELS="true"
+
+# Build and publish ManyLinux wheels
+./scripts/manylinux/build.sh
diff --git a/.kokoro/release-osx-wheel.sh b/.kokoro/release-osx-wheel.sh
new file mode 100755
index 00000000..a33dce92
--- /dev/null
+++ b/.kokoro/release-osx-wheel.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+set -eo pipefail
+
+REPO_ROOT=$(pwd)/github/python-crc32c
+cd github/python-crc32c
+
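+# Ask the OS X build script below to publish the wheels it builds as well as build them.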
+export PUBLISH_WHEELS="true"
+
+# Build and publish OSX wheels
+./scripts/osx/build.sh
diff --git a/.kokoro/release-windows-wheel.bat b/.kokoro/release-windows-wheel.bat
new file mode 100755
index 00000000..d3564e1d
--- /dev/null
+++ b/.kokoro/release-windows-wheel.bat
@@ -0,0 +1,45 @@
+@rem Copyright 2019 Google LLC. All rights reserved.
+@rem
+@rem Licensed under the Apache License, Version 2.0 (the "License");
+@rem you may not use this file except in compliance with the License.
+@rem You may obtain a copy of the License at
+@rem
+@rem http://www.apache.org/licenses/LICENSE-2.0
+@rem
+@rem Unless required by applicable law or agreed to in writing, software
+@rem distributed under the License is distributed on an "AS IS" BASIS,
+@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+@rem See the License for the specific language governing permissions and
+@rem limitations under the License.
+
+@echo "Starting Windows release"
+
+cd /d %~dp0
+cd ..
+
+@echo "Build Wheel"
+call scripts\windows\build.bat || goto :error
+
+@echo "Start the releasetool reporter"
+call py -3 -m pip install gcp-releasetool || goto :error
+call py -3 -m releasetool publish-reporter-script > C:\temp\publisher-script || goto :error
+
+@echo "Ensure that we have the latest versions of Twine, Wheel, and Setuptools."
+call py -3 -m pip install --upgrade twine wheel setuptools || goto :error
+
+@echo "Disable buffering, so that the logs stream through."
+set PYTHONUNBUFFERED=1
+
+@echo "## RELASE WORKFLOW SUCCESSFUL ##"
+@echo "## Uploading Wheels ##"
+
+@echo "Move into the package, build the distribution and upload."
+set /p TWINE_PASSWORD=<%KOKORO_KEYSTORE_DIR%/73713_google-cloud-pypi-token-keystore-1
+call py -3 setup.py sdist || goto :error
+call py -3 -m twine upload --skip-existing --username __token__ --password "%TWINE_PASSWORD%" dist/* wheels/google_crc32c* || goto :error
+
+goto :EOF
+
+:error
+exit /b 1
diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg
new file mode 100644
index 00000000..04f24d4d
--- /dev/null
+++ b/.kokoro/release/common.cfg
@@ -0,0 +1,49 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-crc32c/.kokoro/trampoline.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/release.sh"
+}
+
+# Fetch PyPI password
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "google-cloud-pypi-token-keystore-1"
+ }
+ }
+}
+
+# Tokens needed to report release status back to GitHub
+env_vars: {
+ key: "SECRET_MANAGER_KEYS"
+ value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem"
+}
+
+# Store the packages we uploaded to PyPI. That way, we have a record of exactly
+# what we published, which we can use to generate SBOMs and attestations.
+action {
+ define_artifacts {
+ regex: "github/python-crc32c/**/*.tar.gz"
+ strip_prefix: "github/python-crc32c"
+ }
+}
diff --git a/.kokoro/release/linux/common.cfg b/.kokoro/release/linux/common.cfg
new file mode 100644
index 00000000..694320c7
--- /dev/null
+++ b/.kokoro/release/linux/common.cfg
@@ -0,0 +1,44 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-crc32c/.kokoro/trampoline.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/release.sh"
+}
+
+# Fetch the token needed for reporting release status to GitHub
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "yoshi-automation-github-key"
+ }
+ }
+}
+
+# Fetch PyPI password
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "google-cloud-pypi-token-keystore-1"
+ }
+ }
+}
diff --git a/.kokoro/release/linux/linux_wheel.cfg b/.kokoro/release/linux/linux_wheel.cfg
new file mode 100644
index 00000000..0c32ba5e
--- /dev/null
+++ b/.kokoro/release/linux/linux_wheel.cfg
@@ -0,0 +1,14 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/release-manylinux-wheel.sh"
+}
+
+# Tell the trampoline which build file to use.
+env_vars: {
+ key: "PACKAGE"
+ value: "crc32c"
+}
+
+build_file: "python-crc32c/.kokoro/release-manylinux-wheel.sh"
diff --git a/.kokoro/release/osx/common.cfg b/.kokoro/release/osx/common.cfg
new file mode 100644
index 00000000..694320c7
--- /dev/null
+++ b/.kokoro/release/osx/common.cfg
@@ -0,0 +1,44 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-crc32c/.kokoro/trampoline.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/release.sh"
+}
+
+# Fetch the token needed for reporting release status to GitHub
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "yoshi-automation-github-key"
+ }
+ }
+}
+
+# Fetch PyPI password
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "google-cloud-pypi-token-keystore-1"
+ }
+ }
+}
diff --git a/.kokoro/release/osx/osx-slcn_wheel.cfg b/.kokoro/release/osx/osx-slcn_wheel.cfg
new file mode 100644
index 00000000..3a40d7a8
--- /dev/null
+++ b/.kokoro/release/osx/osx-slcn_wheel.cfg
@@ -0,0 +1,14 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/release-osx-wheel.sh"
+}
+
+# Tell the trampoline which build file to use.
+env_vars: {
+ key: "PACKAGE"
+ value: "crc32c"
+}
+
+build_file: "python-crc32c/.kokoro/release-osx-wheel.sh"
diff --git a/.kokoro/release/osx/osx_wheel.cfg b/.kokoro/release/osx/osx_wheel.cfg
new file mode 100644
index 00000000..3a40d7a8
--- /dev/null
+++ b/.kokoro/release/osx/osx_wheel.cfg
@@ -0,0 +1,14 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/release-osx-wheel.sh"
+}
+
+# Tell the trampoline which build file to use.
+env_vars: {
+ key: "PACKAGE"
+ value: "crc32c"
+}
+
+build_file: "python-crc32c/.kokoro/release-osx-wheel.sh"
diff --git a/.kokoro/release/release.cfg b/.kokoro/release/release.cfg
new file mode 100644
index 00000000..8f43917d
--- /dev/null
+++ b/.kokoro/release/release.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/release/windows/common.cfg b/.kokoro/release/windows/common.cfg
new file mode 100644
index 00000000..694320c7
--- /dev/null
+++ b/.kokoro/release/windows/common.cfg
@@ -0,0 +1,44 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-crc32c/.kokoro/trampoline.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/release.sh"
+}
+
+# Fetch the token needed for reporting release status to GitHub
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "yoshi-automation-github-key"
+ }
+ }
+}
+
+# Fetch PyPI password
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "google-cloud-pypi-token-keystore-1"
+ }
+ }
+}
diff --git a/.kokoro/release/windows/windows_wheel.cfg b/.kokoro/release/windows/windows_wheel.cfg
new file mode 100644
index 00000000..1f640148
--- /dev/null
+++ b/.kokoro/release/windows/windows_wheel.cfg
@@ -0,0 +1,14 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/release-windows-wheel.sh"
+}
+
+# Tell the trampoline which build file to use.
+env_vars: {
+ key: "PACKAGE"
+ value: "crc32c"
+}
+
+build_file: "python-crc32c/.kokoro/release-windows-wheel.bat"
diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in
new file mode 100644
index 00000000..fff4d9ce
--- /dev/null
+++ b/.kokoro/requirements.in
@@ -0,0 +1,12 @@
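+# Input for pip-compile; .kokoro/requirements.txt is regenerated with: pip-compile --allow-unsafe --generate-hashes requirements.in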
+gcp-docuploader
+gcp-releasetool>=2 # required for compatibility with cryptography>=42.x
+importlib-metadata
+typing-extensions
+twine
+wheel
+setuptools
+nox>=2022.11.21 # required to remove dependency on py
+charset-normalizer<3
+click<8.1.0
+cryptography>=42.0.5
diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt
new file mode 100644
index 00000000..9622baf0
--- /dev/null
+++ b/.kokoro/requirements.txt
@@ -0,0 +1,537 @@
+#
+# This file is autogenerated by pip-compile with Python 3.9
+# by the following command:
+#
+# pip-compile --allow-unsafe --generate-hashes requirements.in
+#
+argcomplete==3.4.0 \
+ --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \
+ --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f
+ # via nox
+attrs==23.2.0 \
+ --hash=sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30 \
+ --hash=sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1
+ # via gcp-releasetool
+backports-tarfile==1.2.0 \
+ --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \
+ --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991
+ # via jaraco-context
+cachetools==5.3.3 \
+ --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \
+ --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105
+ # via google-auth
+certifi==2024.7.4 \
+ --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \
+ --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90
+ # via requests
+cffi==1.16.0 \
+ --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \
+ --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \
+ --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \
+ --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \
+ --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \
+ --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \
+ --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \
+ --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \
+ --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \
+ --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \
+ --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \
+ --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \
+ --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \
+ --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \
+ --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \
+ --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \
+ --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \
+ --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \
+ --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \
+ --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \
+ --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \
+ --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \
+ --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \
+ --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \
+ --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \
+ --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \
+ --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \
+ --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \
+ --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \
+ --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \
+ --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \
+ --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \
+ --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \
+ --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \
+ --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \
+ --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \
+ --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \
+ --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \
+ --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \
+ --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \
+ --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \
+ --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \
+ --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \
+ --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \
+ --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \
+ --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \
+ --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \
+ --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \
+ --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \
+ --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \
+ --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \
+ --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357
+ # via cryptography
+charset-normalizer==2.1.1 \
+ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \
+ --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f
+ # via
+ # -r requirements.in
+ # requests
+click==8.0.4 \
+ --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \
+ --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb
+ # via
+ # -r requirements.in
+ # gcp-docuploader
+ # gcp-releasetool
+colorlog==6.8.2 \
+ --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \
+ --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33
+ # via
+ # gcp-docuploader
+ # nox
+cryptography==42.0.8 \
+ --hash=sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad \
+ --hash=sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583 \
+ --hash=sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b \
+ --hash=sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c \
+ --hash=sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1 \
+ --hash=sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648 \
+ --hash=sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949 \
+ --hash=sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba \
+ --hash=sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c \
+ --hash=sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9 \
+ --hash=sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d \
+ --hash=sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c \
+ --hash=sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e \
+ --hash=sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2 \
+ --hash=sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d \
+ --hash=sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7 \
+ --hash=sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70 \
+ --hash=sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2 \
+ --hash=sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7 \
+ --hash=sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14 \
+ --hash=sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe \
+ --hash=sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e \
+ --hash=sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71 \
+ --hash=sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961 \
+ --hash=sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7 \
+ --hash=sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c \
+ --hash=sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28 \
+ --hash=sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842 \
+ --hash=sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902 \
+ --hash=sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801 \
+ --hash=sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a \
+ --hash=sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e
+ # via
+ # -r requirements.in
+ # gcp-releasetool
+ # secretstorage
+distlib==0.3.8 \
+ --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \
+ --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64
+ # via virtualenv
+docutils==0.21.2 \
+ --hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \
+ --hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2
+ # via readme-renderer
+filelock==3.15.4 \
+ --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \
+ --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7
+ # via virtualenv
+gcp-docuploader==0.6.5 \
+ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \
+ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea
+ # via -r requirements.in
+gcp-releasetool==2.0.1 \
+ --hash=sha256:34314a910c08e8911d9c965bd44f8f2185c4f556e737d719c33a41f6a610de96 \
+ --hash=sha256:b0d5863c6a070702b10883d37c4bdfd74bf930fe417f36c0c965d3b7c779ae62
+ # via -r requirements.in
+google-api-core==2.19.1 \
+ --hash=sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125 \
+ --hash=sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd
+ # via
+ # google-cloud-core
+ # google-cloud-storage
+google-auth==2.31.0 \
+ --hash=sha256:042c4702efa9f7d3c48d3a69341c209381b125faa6dbf3ebe56bc7e40ae05c23 \
+ --hash=sha256:87805c36970047247c8afe614d4e3af8eceafc1ebba0c679fe75ddd1d575e871
+ # via
+ # gcp-releasetool
+ # google-api-core
+ # google-cloud-core
+ # google-cloud-storage
+google-cloud-core==2.4.1 \
+ --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \
+ --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61
+ # via google-cloud-storage
+google-cloud-storage==2.17.0 \
+ --hash=sha256:49378abff54ef656b52dca5ef0f2eba9aa83dc2b2c72c78714b03a1a95fe9388 \
+ --hash=sha256:5b393bc766b7a3bc6f5407b9e665b2450d36282614b7945e570b3480a456d1e1
+ # via gcp-docuploader
+google-crc32c==1.5.0 \
+ --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \
+ --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \
+ --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \
+ --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \
+ --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \
+ --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \
+ --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \
+ --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \
+ --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \
+ --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \
+ --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \
+ --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \
+ --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \
+ --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \
+ --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \
+ --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \
+ --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \
+ --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \
+ --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \
+ --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \
+ --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \
+ --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \
+ --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \
+ --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \
+ --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \
+ --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \
+ --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \
+ --hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \
+ --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \
+ --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \
+ --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \
+ --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \
+ --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \
+ --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \
+ --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \
+ --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \
+ --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \
+ --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \
+ --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \
+ --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \
+ --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \
+ --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \
+ --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \
+ --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \
+ --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \
+ --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \
+ --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \
+ --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \
+ --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \
+ --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \
+ --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \
+ --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \
+ --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \
+ --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \
+ --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \
+ --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \
+ --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \
+ --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \
+ --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \
+ --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \
+ --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \
+ --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \
+ --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \
+ --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \
+ --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \
+ --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \
+ --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \
+ --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4
+ # via
+ # google-cloud-storage
+ # google-resumable-media
+google-resumable-media==2.7.1 \
+ --hash=sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c \
+ --hash=sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33
+ # via google-cloud-storage
+googleapis-common-protos==1.63.2 \
+ --hash=sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945 \
+ --hash=sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87
+ # via google-api-core
+idna==3.7 \
+ --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \
+ --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0
+ # via requests
+importlib-metadata==8.0.0 \
+ --hash=sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f \
+ --hash=sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812
+ # via
+ # -r requirements.in
+ # keyring
+ # twine
+jaraco-classes==3.4.0 \
+ --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \
+ --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790
+ # via keyring
+jaraco-context==5.3.0 \
+ --hash=sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266 \
+ --hash=sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2
+ # via keyring
+jaraco-functools==4.0.1 \
+ --hash=sha256:3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664 \
+ --hash=sha256:d33fa765374c0611b52f8b3a795f8900869aa88c84769d4d1746cd68fb28c3e8
+ # via keyring
+jeepney==0.8.0 \
+ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \
+ --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755
+ # via
+ # keyring
+ # secretstorage
+jinja2==3.1.4 \
+ --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \
+ --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d
+ # via gcp-releasetool
+keyring==25.2.1 \
+ --hash=sha256:2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50 \
+ --hash=sha256:daaffd42dbda25ddafb1ad5fec4024e5bbcfe424597ca1ca452b299861e49f1b
+ # via
+ # gcp-releasetool
+ # twine
+markdown-it-py==3.0.0 \
+ --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \
+ --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb
+ # via rich
+markupsafe==2.1.5 \
+ --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \
+ --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \
+ --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \
+ --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \
+ --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \
+ --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \
+ --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \
+ --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \
+ --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \
+ --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \
+ --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \
+ --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \
+ --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \
+ --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \
+ --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \
+ --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \
+ --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \
+ --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \
+ --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \
+ --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \
+ --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \
+ --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \
+ --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \
+ --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \
+ --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \
+ --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \
+ --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \
+ --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \
+ --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \
+ --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \
+ --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \
+ --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \
+ --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \
+ --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \
+ --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \
+ --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \
+ --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \
+ --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \
+ --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \
+ --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \
+ --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \
+ --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \
+ --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \
+ --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \
+ --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \
+ --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \
+ --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \
+ --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \
+ --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \
+ --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \
+ --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \
+ --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \
+ --hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \
+ --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \
+ --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \
+ --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \
+ --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \
+ --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \
+ --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \
+ --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68
+ # via jinja2
+mdurl==0.1.2 \
+ --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \
+ --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba
+ # via markdown-it-py
+more-itertools==10.3.0 \
+ --hash=sha256:e5d93ef411224fbcef366a6e8ddc4c5781bc6359d43412a65dd5964e46111463 \
+ --hash=sha256:ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320
+ # via
+ # jaraco-classes
+ # jaraco-functools
+nh3==0.2.18 \
+ --hash=sha256:0411beb0589eacb6734f28d5497ca2ed379eafab8ad8c84b31bb5c34072b7164 \
+ --hash=sha256:14c5a72e9fe82aea5fe3072116ad4661af5cf8e8ff8fc5ad3450f123e4925e86 \
+ --hash=sha256:19aaba96e0f795bd0a6c56291495ff59364f4300d4a39b29a0abc9cb3774a84b \
+ --hash=sha256:34c03fa78e328c691f982b7c03d4423bdfd7da69cd707fe572f544cf74ac23ad \
+ --hash=sha256:36c95d4b70530b320b365659bb5034341316e6a9b30f0b25fa9c9eff4c27a204 \
+ --hash=sha256:3a157ab149e591bb638a55c8c6bcb8cdb559c8b12c13a8affaba6cedfe51713a \
+ --hash=sha256:42c64511469005058cd17cc1537578eac40ae9f7200bedcfd1fc1a05f4f8c200 \
+ --hash=sha256:5f36b271dae35c465ef5e9090e1fdaba4a60a56f0bb0ba03e0932a66f28b9189 \
+ --hash=sha256:6955369e4d9f48f41e3f238a9e60f9410645db7e07435e62c6a9ea6135a4907f \
+ --hash=sha256:7b7c2a3c9eb1a827d42539aa64091640bd275b81e097cd1d8d82ef91ffa2e811 \
+ --hash=sha256:8ce0f819d2f1933953fca255db2471ad58184a60508f03e6285e5114b6254844 \
+ --hash=sha256:94a166927e53972a9698af9542ace4e38b9de50c34352b962f4d9a7d4c927af4 \
+ --hash=sha256:a7f1b5b2c15866f2db413a3649a8fe4fd7b428ae58be2c0f6bca5eefd53ca2be \
+ --hash=sha256:c8b3a1cebcba9b3669ed1a84cc65bf005728d2f0bc1ed2a6594a992e817f3a50 \
+ --hash=sha256:de3ceed6e661954871d6cd78b410213bdcb136f79aafe22aa7182e028b8c7307 \
+ --hash=sha256:f0eca9ca8628dbb4e916ae2491d72957fdd35f7a5d326b7032a345f111ac07fe
+ # via readme-renderer
+nox==2024.4.15 \
+ --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \
+ --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f
+ # via -r requirements.in
+packaging==24.1 \
+ --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \
+ --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124
+ # via
+ # gcp-releasetool
+ # nox
+pkginfo==1.10.0 \
+ --hash=sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297 \
+ --hash=sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097
+ # via twine
+platformdirs==4.2.2 \
+ --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \
+ --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3
+ # via virtualenv
+proto-plus==1.24.0 \
+ --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \
+ --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12
+ # via google-api-core
+protobuf==5.27.2 \
+ --hash=sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505 \
+ --hash=sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b \
+ --hash=sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38 \
+ --hash=sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863 \
+ --hash=sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470 \
+ --hash=sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6 \
+ --hash=sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce \
+ --hash=sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca \
+ --hash=sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5 \
+ --hash=sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e \
+ --hash=sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714
+ # via
+ # gcp-docuploader
+ # gcp-releasetool
+ # google-api-core
+ # googleapis-common-protos
+ # proto-plus
+pyasn1==0.6.0 \
+ --hash=sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c \
+ --hash=sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473
+ # via
+ # pyasn1-modules
+ # rsa
+pyasn1-modules==0.4.0 \
+ --hash=sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6 \
+ --hash=sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b
+ # via google-auth
+pycparser==2.22 \
+ --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \
+ --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc
+ # via cffi
+pygments==2.18.0 \
+ --hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \
+ --hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a
+ # via
+ # readme-renderer
+ # rich
+pyjwt==2.8.0 \
+ --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \
+ --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320
+ # via gcp-releasetool
+pyperclip==1.9.0 \
+ --hash=sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310
+ # via gcp-releasetool
+python-dateutil==2.9.0.post0 \
+ --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \
+ --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427
+ # via gcp-releasetool
+readme-renderer==44.0 \
+ --hash=sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151 \
+ --hash=sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1
+ # via twine
+requests==2.32.3 \
+ --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \
+ --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6
+ # via
+ # gcp-releasetool
+ # google-api-core
+ # google-cloud-storage
+ # requests-toolbelt
+ # twine
+requests-toolbelt==1.0.0 \
+ --hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \
+ --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06
+ # via twine
+rfc3986==2.0.0 \
+ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \
+ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c
+ # via twine
+rich==13.7.1 \
+ --hash=sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222 \
+ --hash=sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432
+ # via twine
+rsa==4.9 \
+ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \
+ --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21
+ # via google-auth
+secretstorage==3.3.3 \
+ --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \
+ --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99
+ # via keyring
+six==1.16.0 \
+ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
+ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
+ # via
+ # gcp-docuploader
+ # python-dateutil
+tomli==2.0.1 \
+ --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \
+ --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f
+ # via nox
+twine==5.1.1 \
+ --hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \
+ --hash=sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db
+ # via -r requirements.in
+typing-extensions==4.12.2 \
+ --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \
+ --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8
+ # via -r requirements.in
+urllib3==2.2.2 \
+ --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \
+ --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168
+ # via
+ # requests
+ # twine
+virtualenv==20.26.3 \
+ --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \
+ --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589
+ # via nox
+wheel==0.43.0 \
+ --hash=sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85 \
+ --hash=sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81
+ # via -r requirements.in
+zipp==3.19.2 \
+ --hash=sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19 \
+ --hash=sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c
+ # via importlib-metadata
+
+# The following packages are considered to be unsafe in a requirements file:
+setuptools==70.2.0 \
+ --hash=sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05 \
+ --hash=sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1
+ # via -r requirements.in
diff --git a/.kokoro/samples/lint/common.cfg b/.kokoro/samples/lint/common.cfg
new file mode 100644
index 00000000..2c8ea0ee
--- /dev/null
+++ b/.kokoro/samples/lint/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "lint"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-crc32c/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/lint/continuous.cfg b/.kokoro/samples/lint/continuous.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/lint/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/lint/periodic.cfg b/.kokoro/samples/lint/periodic.cfg
new file mode 100644
index 00000000..50fec964
--- /dev/null
+++ b/.kokoro/samples/lint/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/lint/presubmit.cfg b/.kokoro/samples/lint/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/lint/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.10/common.cfg b/.kokoro/samples/python3.10/common.cfg
new file mode 100644
index 00000000..cb31f794
--- /dev/null
+++ b/.kokoro/samples/python3.10/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.10"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-310"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-crc32c/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.10/continuous.cfg b/.kokoro/samples/python3.10/continuous.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.10/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.10/periodic-head.cfg b/.kokoro/samples/python3.10/periodic-head.cfg
new file mode 100644
index 00000000..1f21f1bb
--- /dev/null
+++ b/.kokoro/samples/python3.10/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.10/periodic.cfg b/.kokoro/samples/python3.10/periodic.cfg
new file mode 100644
index 00000000..71cd1e59
--- /dev/null
+++ b/.kokoro/samples/python3.10/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
diff --git a/.kokoro/samples/python3.10/presubmit.cfg b/.kokoro/samples/python3.10/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.10/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.11/common.cfg b/.kokoro/samples/python3.11/common.cfg
new file mode 100644
index 00000000..65a83de3
--- /dev/null
+++ b/.kokoro/samples/python3.11/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.11"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-311"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-crc32c/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.11/continuous.cfg b/.kokoro/samples/python3.11/continuous.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.11/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.11/periodic-head.cfg b/.kokoro/samples/python3.11/periodic-head.cfg
new file mode 100644
index 00000000..1f21f1bb
--- /dev/null
+++ b/.kokoro/samples/python3.11/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.11/periodic.cfg b/.kokoro/samples/python3.11/periodic.cfg
new file mode 100644
index 00000000..71cd1e59
--- /dev/null
+++ b/.kokoro/samples/python3.11/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
diff --git a/.kokoro/samples/python3.11/presubmit.cfg b/.kokoro/samples/python3.11/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.11/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.12/common.cfg b/.kokoro/samples/python3.12/common.cfg
new file mode 100644
index 00000000..9b19ad26
--- /dev/null
+++ b/.kokoro/samples/python3.12/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.12"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-312"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-crc32c/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.12/continuous.cfg b/.kokoro/samples/python3.12/continuous.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.12/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.12/periodic-head.cfg b/.kokoro/samples/python3.12/periodic-head.cfg
new file mode 100644
index 00000000..1f21f1bb
--- /dev/null
+++ b/.kokoro/samples/python3.12/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.12/periodic.cfg b/.kokoro/samples/python3.12/periodic.cfg
new file mode 100644
index 00000000..71cd1e59
--- /dev/null
+++ b/.kokoro/samples/python3.12/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
diff --git a/.kokoro/samples/python3.12/presubmit.cfg b/.kokoro/samples/python3.12/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.12/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg
new file mode 100644
index 00000000..84712365
--- /dev/null
+++ b/.kokoro/samples/python3.7/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.7"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py37"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-crc32c/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/continuous.cfg b/.kokoro/samples/python3.7/continuous.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.7/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg
new file mode 100644
index 00000000..1f21f1bb
--- /dev/null
+++ b/.kokoro/samples/python3.7/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.7/periodic.cfg b/.kokoro/samples/python3.7/periodic.cfg
new file mode 100644
index 00000000..71cd1e59
--- /dev/null
+++ b/.kokoro/samples/python3.7/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
diff --git a/.kokoro/samples/python3.7/presubmit.cfg b/.kokoro/samples/python3.7/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.7/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg
new file mode 100644
index 00000000..c2f4b9dc
--- /dev/null
+++ b/.kokoro/samples/python3.8/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.8"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py38"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-crc32c/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/continuous.cfg b/.kokoro/samples/python3.8/continuous.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.8/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg
new file mode 100644
index 00000000..1f21f1bb
--- /dev/null
+++ b/.kokoro/samples/python3.8/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.8/periodic.cfg b/.kokoro/samples/python3.8/periodic.cfg
new file mode 100644
index 00000000..71cd1e59
--- /dev/null
+++ b/.kokoro/samples/python3.8/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
diff --git a/.kokoro/samples/python3.8/presubmit.cfg b/.kokoro/samples/python3.8/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.8/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.9/common.cfg b/.kokoro/samples/python3.9/common.cfg
new file mode 100644
index 00000000..67e3e6bc
--- /dev/null
+++ b/.kokoro/samples/python3.9/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.9"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py39"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-crc32c/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.9/continuous.cfg b/.kokoro/samples/python3.9/continuous.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.9/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.9/periodic-head.cfg b/.kokoro/samples/python3.9/periodic-head.cfg
new file mode 100644
index 00000000..1f21f1bb
--- /dev/null
+++ b/.kokoro/samples/python3.9/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-crc32c/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.9/periodic.cfg b/.kokoro/samples/python3.9/periodic.cfg
new file mode 100644
index 00000000..71cd1e59
--- /dev/null
+++ b/.kokoro/samples/python3.9/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
diff --git a/.kokoro/samples/python3.9/presubmit.cfg b/.kokoro/samples/python3.9/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.9/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh
new file mode 100755
index 00000000..e9d8bd79
--- /dev/null
+++ b/.kokoro/test-samples-against-head.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A customized test runner for samples.
+#
+# For periodic builds, you can specify this file for testing against head.
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command that exits with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+exec .kokoro/test-samples-impl.sh
diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh
new file mode 100755
index 00000000..55910c8b
--- /dev/null
+++ b/.kokoro/test-samples-impl.sh
@@ -0,0 +1,102 @@
+#!/bin/bash
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command that exits with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+# Exit early if samples don't exist
+if ! find samples -name 'requirements.txt' | grep -q .; then
+ echo "No tests run. './samples/**/requirements.txt' not found"
+ exit 0
+fi
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Install nox
+python3.9 -m pip install --upgrade --quiet nox
+
+# Use the secrets accessor service account to get secrets
+if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
+ gcloud auth activate-service-account \
+ --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
+ --project="cloud-devrel-kokoro-resources"
+fi
+
+# This script will create 3 files:
+# - testing/test-env.sh
+# - testing/service-account.json
+# - testing/client-secrets.json
+./scripts/decrypt-secrets.sh
+
+source ./testing/test-env.sh
+export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
+
+# For cloud-run session, we activate the service account for gcloud sdk.
+gcloud auth activate-service-account \
+ --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
+
+export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
+
+echo -e "\n******************** TESTING PROJECTS ********************"
+
+# Switch to 'fail at end' to allow all tests to complete before exiting.
+set +e
+# Use RTN to return a non-zero value if the test fails.
+RTN=0
+ROOT=$(pwd)
+# Find all requirements.txt in the samples directory (may break on whitespace).
+for file in samples/**/requirements.txt; do
+ cd "$ROOT"
+ # Navigate to the project folder.
+ file=$(dirname "$file")
+ cd "$file"
+
+ echo "------------------------------------------------------------"
+ echo "- testing $file"
+ echo "------------------------------------------------------------"
+
+ # Use nox to execute the tests for the project.
+ python3.9 -m nox -s "$RUN_TESTS_SESSION"
+ EXIT=$?
+
+ # If this is a periodic build, send the test log to the FlakyBot.
+ # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
+ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+ chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ fi
+
+ if [[ $EXIT -ne 0 ]]; then
+ RTN=1
+ echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
+ else
+ echo -e "\n Testing completed.\n"
+ fi
+
+done
+cd "$ROOT"
+
+# Workaround for Kokoro permissions issue: delete secrets
+rm testing/{test-env.sh,client-secrets.json,service-account.json}
+
+exit "$RTN"
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
new file mode 100755
index 00000000..7933d820
--- /dev/null
+++ b/.kokoro/test-samples.sh
@@ -0,0 +1,44 @@
+#!/bin/bash
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# The default test runner for samples.
+#
+# For periodic builds, we rewind the repo to the latest release, and
+# run test-samples-impl.sh.
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command that exits with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+# Run periodic samples tests at latest release
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+ # Preserve the test runner implementation.
+ cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh"
+ echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
+ echo "Now we rewind the repo back to the latest release..."
+ LATEST_RELEASE=$(git describe --abbrev=0 --tags)
+ git checkout $LATEST_RELEASE
+ echo "The current head is: "
+ echo $(git rev-parse --verify HEAD)
+ echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
+ # Restore the test runner implementation if the file is missing after the rewind.
+ if [ ! -f .kokoro/test-samples-impl.sh ]; then
+ cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh
+ fi
+fi
+
+exec .kokoro/test-samples-impl.sh
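The rewind step above pins periodic sample runs to the most recent tag. A quick way to see which tag that would be (the output shown is only an example):

    git describe --abbrev=0 --tags   # for example, "v1.6.0"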
diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh
index e8c4251f..48f79699 100755
--- a/.kokoro/trampoline.sh
+++ b/.kokoro/trampoline.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2017 Google Inc.
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,9 +15,14 @@
set -eo pipefail
-python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" || ret_code=$?
+# Always run the cleanup script, regardless of the success of bouncing into
+# the container.
+function cleanup() {
+ chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+ ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+ echo "cleanup";
+}
+trap cleanup EXIT
-chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
-${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true
-
-exit ${ret_code}
+$(dirname $0)/populate-secrets.sh # Secret Manager secrets.
+python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py"
\ No newline at end of file
diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh
new file mode 100755
index 00000000..35fa5292
--- /dev/null
+++ b/.kokoro/trampoline_v2.sh
@@ -0,0 +1,487 @@
+#!/usr/bin/env bash
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# trampoline_v2.sh
+#
+# This script does 3 things.
+#
+# 1. Prepare the Docker image for the test
+# 2. Run the Docker with appropriate flags to run the test
+# 3. Upload the newly built Docker image
+#
+# in a way that is somewhat compatible with trampoline_v1.
+#
+# To run this script, first download a few files from GCS to /dev/shm.
+# (/dev/shm is passed into the container as KOKORO_GFILE_DIR).
+#
+# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm
+# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm
+#
+# Then run the script.
+# .kokoro/trampoline_v2.sh
+#
+# These environment variables are required:
+# TRAMPOLINE_IMAGE: The docker image to use.
+# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile.
+#
+# You can optionally change these environment variables:
+# TRAMPOLINE_IMAGE_UPLOAD:
+# (true|false): Whether to upload the Docker image after a
+# successful build.
+# TRAMPOLINE_BUILD_FILE: The script to run in the docker container.
+# TRAMPOLINE_WORKSPACE: The workspace path in the docker container.
+# Defaults to /workspace.
+# Potentially there are some repo specific envvars in .trampolinerc in
+# the project root.
+
+
+set -euo pipefail
+
+TRAMPOLINE_VERSION="2.0.5"
+
+if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then
+ readonly IO_COLOR_RED="$(tput setaf 1)"
+ readonly IO_COLOR_GREEN="$(tput setaf 2)"
+ readonly IO_COLOR_YELLOW="$(tput setaf 3)"
+ readonly IO_COLOR_RESET="$(tput sgr0)"
+else
+ readonly IO_COLOR_RED=""
+ readonly IO_COLOR_GREEN=""
+ readonly IO_COLOR_YELLOW=""
+ readonly IO_COLOR_RESET=""
+fi
+
+function function_exists {
+ [ $(LC_ALL=C type -t $1)"" == "function" ]
+}
+
+# Logs a message using the given color. The first argument must be one
+# of the IO_COLOR_* variables defined above, such as
+# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the
+# given color. The log message will also have an RFC-3339 timestamp
+# prepended (in UTC). You can disable the color output by setting
+# TERM=vt100.
+function log_impl() {
+ local color="$1"
+ shift
+ local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")"
+ echo "================================================================"
+ echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}"
+ echo "================================================================"
+}
+
+# Logs the given message with normal coloring and a timestamp.
+function log() {
+ log_impl "${IO_COLOR_RESET}" "$@"
+}
+
+# Logs the given message in green with a timestamp.
+function log_green() {
+ log_impl "${IO_COLOR_GREEN}" "$@"
+}
+
+# Logs the given message in yellow with a timestamp.
+function log_yellow() {
+ log_impl "${IO_COLOR_YELLOW}" "$@"
+}
+
+# Logs the given message in red with a timestamp.
+function log_red() {
+ log_impl "${IO_COLOR_RED}" "$@"
+}
+
+readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX)
+readonly tmphome="${tmpdir}/h"
+mkdir -p "${tmphome}"
+
+function cleanup() {
+ rm -rf "${tmpdir}"
+}
+trap cleanup EXIT
+
+RUNNING_IN_CI="${RUNNING_IN_CI:-false}"
+
+# The workspace in the container, defaults to /workspace.
+TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}"
+
+pass_down_envvars=(
+ # TRAMPOLINE_V2 variables.
+ # Tells scripts whether they are running as part of CI or not.
+ "RUNNING_IN_CI"
+ # Indicates which CI system we're in.
+ "TRAMPOLINE_CI"
+ # Indicates the version of the script.
+ "TRAMPOLINE_VERSION"
+)
+
+log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}"
+
+# Detect which CI system we're in. If we're in any of the CI systems
+# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be
+# the name of the CI system. Both envvars will be passed down to the
+# container so that scripts inside it can tell which CI system they run on.
+if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then
+ # Descriptive env var indicating that we're running on CI.
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="kokoro"
+ if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then
+ if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then
+ log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting."
+ exit 1
+ fi
+ # This service account will be activated later.
+ TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json"
+ else
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ gcloud auth list
+ fi
+ log_yellow "Configuring Container Registry access"
+ gcloud auth configure-docker --quiet
+ fi
+ pass_down_envvars+=(
+ # KOKORO dynamic variables.
+ "KOKORO_BUILD_NUMBER"
+ "KOKORO_BUILD_ID"
+ "KOKORO_JOB_NAME"
+ "KOKORO_GIT_COMMIT"
+ "KOKORO_GITHUB_COMMIT"
+ "KOKORO_GITHUB_PULL_REQUEST_NUMBER"
+ "KOKORO_GITHUB_PULL_REQUEST_COMMIT"
+ # For FlakyBot
+ "KOKORO_GITHUB_COMMIT_URL"
+ "KOKORO_GITHUB_PULL_REQUEST_URL"
+ )
+elif [[ "${TRAVIS:-}" == "true" ]]; then
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="travis"
+ pass_down_envvars+=(
+ "TRAVIS_BRANCH"
+ "TRAVIS_BUILD_ID"
+ "TRAVIS_BUILD_NUMBER"
+ "TRAVIS_BUILD_WEB_URL"
+ "TRAVIS_COMMIT"
+ "TRAVIS_COMMIT_MESSAGE"
+ "TRAVIS_COMMIT_RANGE"
+ "TRAVIS_JOB_NAME"
+ "TRAVIS_JOB_NUMBER"
+ "TRAVIS_JOB_WEB_URL"
+ "TRAVIS_PULL_REQUEST"
+ "TRAVIS_PULL_REQUEST_BRANCH"
+ "TRAVIS_PULL_REQUEST_SHA"
+ "TRAVIS_PULL_REQUEST_SLUG"
+ "TRAVIS_REPO_SLUG"
+ "TRAVIS_SECURE_ENV_VARS"
+ "TRAVIS_TAG"
+ )
+elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="github-workflow"
+ pass_down_envvars+=(
+ "GITHUB_WORKFLOW"
+ "GITHUB_RUN_ID"
+ "GITHUB_RUN_NUMBER"
+ "GITHUB_ACTION"
+ "GITHUB_ACTIONS"
+ "GITHUB_ACTOR"
+ "GITHUB_REPOSITORY"
+ "GITHUB_EVENT_NAME"
+ "GITHUB_EVENT_PATH"
+ "GITHUB_SHA"
+ "GITHUB_REF"
+ "GITHUB_HEAD_REF"
+ "GITHUB_BASE_REF"
+ )
+elif [[ "${CIRCLECI:-}" == "true" ]]; then
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="circleci"
+ pass_down_envvars+=(
+ "CIRCLE_BRANCH"
+ "CIRCLE_BUILD_NUM"
+ "CIRCLE_BUILD_URL"
+ "CIRCLE_COMPARE_URL"
+ "CIRCLE_JOB"
+ "CIRCLE_NODE_INDEX"
+ "CIRCLE_NODE_TOTAL"
+ "CIRCLE_PREVIOUS_BUILD_NUM"
+ "CIRCLE_PROJECT_REPONAME"
+ "CIRCLE_PROJECT_USERNAME"
+ "CIRCLE_REPOSITORY_URL"
+ "CIRCLE_SHA1"
+ "CIRCLE_STAGE"
+ "CIRCLE_USERNAME"
+ "CIRCLE_WORKFLOW_ID"
+ "CIRCLE_WORKFLOW_JOB_ID"
+ "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS"
+ "CIRCLE_WORKFLOW_WORKSPACE_ID"
+ )
+fi
+
+# Configure the service account for pulling the docker image.
+function repo_root() {
+ local dir="$1"
+ while [[ ! -d "${dir}/.git" ]]; do
+ dir="$(dirname "$dir")"
+ done
+ echo "${dir}"
+}
+
+# Detect the project root. In CI builds, we assume the script is in
+# the git tree and traverse from there; otherwise, we traverse from `pwd`
+# to find the `.git` directory.
+if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+ PROGRAM_PATH="$(realpath "$0")"
+ PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")"
+ PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")"
+else
+ PROJECT_ROOT="$(repo_root $(pwd))"
+fi
+
+log_yellow "Changing to the project root: ${PROJECT_ROOT}."
+cd "${PROJECT_ROOT}"
+
+# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need
+# to use this environment variable in `PROJECT_ROOT`.
+if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then
+
+ mkdir -p "${tmpdir}/gcloud"
+ gcloud_config_dir="${tmpdir}/gcloud"
+
+ log_yellow "Using isolated gcloud config: ${gcloud_config_dir}."
+ export CLOUDSDK_CONFIG="${gcloud_config_dir}"
+
+ log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication."
+ gcloud auth activate-service-account \
+ --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}"
+ log_yellow "Configuring Container Registry access"
+ gcloud auth configure-docker --quiet
+fi
+
+required_envvars=(
+ # The basic trampoline configurations.
+ "TRAMPOLINE_IMAGE"
+ "TRAMPOLINE_BUILD_FILE"
+)
+
+if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then
+ source "${PROJECT_ROOT}/.trampolinerc"
+fi
+
+log_yellow "Checking environment variables."
+for e in "${required_envvars[@]}"
+do
+ if [[ -z "${!e:-}" ]]; then
+ log "Missing ${e} env var. Aborting."
+ exit 1
+ fi
+done
+
+# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1
+# script: e.g. "github/repo-name/.kokoro/run_tests.sh"
+TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}"
+log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}"
+
+# ignore error on docker operations and test execution
+set +e
+
+log_yellow "Preparing Docker image."
+# We only download the docker image in CI builds.
+if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+ # Download the docker image specified by `TRAMPOLINE_IMAGE`
+
+ # We may want to add --max-concurrent-downloads flag.
+
+ log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+ if docker pull "${TRAMPOLINE_IMAGE}"; then
+ log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+ has_image="true"
+ else
+ log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+ has_image="false"
+ fi
+else
+ # For local run, check if we have the image.
+ if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then
+ has_image="true"
+ else
+ has_image="false"
+ fi
+fi
+
+
+# The default user for a Docker container has uid 0 (root). To avoid
+# creating root-owned files in the build directory, we tell docker to
+# use the current user ID.
+user_uid="$(id -u)"
+user_gid="$(id -g)"
+user_name="$(id -un)"
+
+# To allow docker-in-docker, we add the user to the docker group on
+# the host OS.
+docker_gid=$(cut -d: -f3 < <(getent group docker))
+
+update_cache="false"
+if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then
+ # Build the Docker image from the source.
+ context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}")
+ docker_build_flags=(
+ "-f" "${TRAMPOLINE_DOCKERFILE}"
+ "-t" "${TRAMPOLINE_IMAGE}"
+ "--build-arg" "UID=${user_uid}"
+ "--build-arg" "USERNAME=${user_name}"
+ )
+ if [[ "${has_image}" == "true" ]]; then
+ docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}")
+ fi
+
+ log_yellow "Start building the docker image."
+ if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then
+ echo "docker build" "${docker_build_flags[@]}" "${context_dir}"
+ fi
+
+ # On CI systems, we suppress the docker build logs and only
+ # output them when the build fails.
+ if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+ if docker build "${docker_build_flags[@]}" "${context_dir}" \
+ > "${tmpdir}/docker_build.log" 2>&1; then
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ cat "${tmpdir}/docker_build.log"
+ fi
+
+ log_green "Finished building the docker image."
+ update_cache="true"
+ else
+ log_red "Failed to build the Docker image, aborting."
+ log_yellow "Dumping the build logs:"
+ cat "${tmpdir}/docker_build.log"
+ exit 1
+ fi
+ else
+ if docker build "${docker_build_flags[@]}" "${context_dir}"; then
+ log_green "Finished building the docker image."
+ update_cache="true"
+ else
+ log_red "Failed to build the Docker image, aborting."
+ exit 1
+ fi
+ fi
+else
+ if [[ "${has_image}" != "true" ]]; then
+ log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting."
+ exit 1
+ fi
+fi
+
+# We use an array for the flags so they are easier to document.
+docker_flags=(
+ # Remove the container after it exits.
+ "--rm"
+
+ # Use the host network.
+ "--network=host"
+
+ # Run in privileged mode. We are not using docker for sandboxing or
+ # isolation, just for packaging our dev tools.
+ "--privileged"
+
+ # Run the build script as the current user ID. Because the container gets to
+ # write in ${PWD}, you typically want this to be your own user ID.
+ # To allow docker-in-docker, we need to use the docker GID from the host.
+ "--user" "${user_uid}:${docker_gid}"
+
+ # Pass down the USER.
+ "--env" "USER=${user_name}"
+
+ # Mount the project directory inside the Docker container.
+ "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}"
+ "--workdir" "${TRAMPOLINE_WORKSPACE}"
+ "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}"
+
+ # Mount the temporary home directory.
+ "--volume" "${tmphome}:/h"
+ "--env" "HOME=/h"
+
+ # Allow docker in docker.
+ "--volume" "/var/run/docker.sock:/var/run/docker.sock"
+
+ # Mount the /tmp so that docker in docker can mount the files
+ # there correctly.
+ "--volume" "/tmp:/tmp"
+ # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR
+ # TODO(tmatsuo): This part is not portable.
+ "--env" "TRAMPOLINE_SECRET_DIR=/secrets"
+ "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile"
+ "--env" "KOKORO_GFILE_DIR=/secrets/gfile"
+ "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore"
+ "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore"
+)
+
+# Add an option for nicer output if the build gets a tty.
+if [[ -t 0 ]]; then
+ docker_flags+=("-it")
+fi
+
+# Passing down env vars
+for e in "${pass_down_envvars[@]}"
+do
+ if [[ -n "${!e:-}" ]]; then
+ docker_flags+=("--env" "${e}=${!e}")
+ fi
+done
+
+# If arguments are given, they become the command run in the
+# container; otherwise TRAMPOLINE_BUILD_FILE is run.
+if [[ $# -ge 1 ]]; then
+ log_yellow "Running the given commands '" "${@:1}" "' in the container."
+ readonly commands=("${@:1}")
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+ fi
+ docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+else
+ log_yellow "Running the tests in a Docker container."
+ docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}")
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+ fi
+ docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+fi
+
+
+test_retval=$?
+
+if [[ ${test_retval} -eq 0 ]]; then
+ log_green "Build finished with ${test_retval}"
+else
+ log_red "Build finished with ${test_retval}"
+fi
+
+# Only upload it when the test passes.
+if [[ "${update_cache}" == "true" ]] && \
+ [[ $test_retval == 0 ]] && \
+ [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then
+ log_yellow "Uploading the Docker image."
+ if docker push "${TRAMPOLINE_IMAGE}"; then
+ log_green "Finished uploading the Docker image."
+ else
+ log_red "Failed uploading the Docker image."
+ fi
+ # Call trampoline_after_upload_hook if it's defined.
+ if function_exists trampoline_after_upload_hook; then
+ trampoline_after_upload_hook
+ fi
+
+fi
+
+exit "${test_retval}"
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 00000000..1d74695f
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,31 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# See https://pre-commit.com for more information
+# See https://pre-commit.com/hooks.html for more hooks
+repos:
+- repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.0.1
+ hooks:
+ - id: trailing-whitespace
+ - id: end-of-file-fixer
+ - id: check-yaml
+- repo: https://github.com/psf/black
+ rev: 23.7.0
+ hooks:
+ - id: black
+- repo: https://github.com/pycqa/flake8
+ rev: 6.1.0
+ hooks:
+ - id: flake8
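These hooks follow the standard pre-commit workflow, so they can be exercised locally before pushing:

    python3 -m pip install pre-commit
    pre-commit install           # run the hooks automatically on each git commit
    pre-commit run --all-files   # or lint the whole tree once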
diff --git a/.repo-metadata.json b/.repo-metadata.json
new file mode 100644
index 00000000..abe49ce2
--- /dev/null
+++ b/.repo-metadata.json
@@ -0,0 +1,14 @@
+{
+ "name": "google-crc32c",
+ "name_pretty": "A python wrapper of the C library 'Google CRC32C'",
+ "product_documentation": "",
+ "client_documentation": "https://github.com/googleapis/python-crc32c",
+ "issue_tracker": "https://github.com/googleapis/python-crc32c/issues",
+ "release_level": "stable",
+ "language": "python",
+ "library_type": "OTHER",
+ "repo": "googleapis/python-crc32c",
+ "distribution_name": "google-crc32c",
+ "default_version": "",
+ "codeowner_team": ""
+}
diff --git a/.trampolinerc b/.trampolinerc
new file mode 100644
index 00000000..00801523
--- /dev/null
+++ b/.trampolinerc
@@ -0,0 +1,61 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Add required env vars here.
+required_envvars+=(
+)
+
+# Add env vars which are passed down into the container here.
+pass_down_envvars+=(
+ "NOX_SESSION"
+ ###############
+ # Docs builds
+ ###############
+ "STAGING_BUCKET"
+ "V2_STAGING_BUCKET"
+ ##################
+ # Samples builds
+ ##################
+ "INSTALL_LIBRARY_FROM_SOURCE"
+ "RUN_TESTS_SESSION"
+ "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ # Target directories.
+ "RUN_TESTS_DIRS"
+ # The nox session to run.
+ "RUN_TESTS_SESSION"
+)
+
+# Prevent an unintentional override of the default image.
+if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \
+ [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then
+ echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image."
+ exit 1
+fi
+
+# Define the default value if it makes sense.
+if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then
+ TRAMPOLINE_IMAGE_UPLOAD=""
+fi
+
+if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then
+ TRAMPOLINE_IMAGE=""
+fi
+
+if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then
+ TRAMPOLINE_DOCKERFILE=""
+fi
+
+if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then
+ TRAMPOLINE_BUILD_FILE=""
+fi
diff --git a/CHANGELOG.md b/CHANGELOG.md
index efb079d9..2eb1ffc2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,21 @@
[1]: https://pypi.org/project/google-crc32c/#history
+## [1.6.0](https://github.com/googleapis/python-crc32c/compare/v1.5.0...v1.6.0) (2024-08-29)
+
+
+### Features
+
+* Add support for python 3.12 ([#177](https://github.com/googleapis/python-crc32c/issues/177)) ([5ff1207](https://github.com/googleapis/python-crc32c/commit/5ff1207e7b60256e7a32932324ccb9ad4ec265d2))
+* Build using Visual Studio 17 2022 instead of Visual Studio 16 2019 ([c1c8c59](https://github.com/googleapis/python-crc32c/commit/c1c8c597d07e573406d76765022a837b007f9074))
+
+
+### Bug Fixes
+
+* Drop support for python 3.7 and 3.8 ([c1c8c59](https://github.com/googleapis/python-crc32c/commit/c1c8c597d07e573406d76765022a837b007f9074))
+* Drop support for Windows 32bit which is not supported in Visual Studio 17 2022 ([c1c8c59](https://github.com/googleapis/python-crc32c/commit/c1c8c597d07e573406d76765022a837b007f9074))
+* Remove manylinux1 which is no longer supported by PyPA ([#186](https://github.com/googleapis/python-crc32c/issues/186)) ([79edb3f](https://github.com/googleapis/python-crc32c/commit/79edb3fd3cda0e4193a6fb6a8346058398df43de))
+
## [1.5.0](https://github.com/googleapis/python-crc32c/compare/v1.4.0...v1.5.0) (2022-08-31)
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index 46b2a08e..039f4368 100644
--- a/CODE_OF_CONDUCT.md
+++ b/CODE_OF_CONDUCT.md
@@ -1,43 +1,95 @@
-# Contributor Code of Conduct
+
+# Code of Conduct
-As contributors and maintainers of this project,
-and in the interest of fostering an open and welcoming community,
-we pledge to respect all people who contribute through reporting issues,
-posting feature requests, updating documentation,
-submitting pull requests or patches, and other activities.
+## Our Pledge
-We are committed to making participation in this project
-a harassment-free experience for everyone,
-regardless of level of experience, gender, gender identity and expression,
-sexual orientation, disability, personal appearance,
-body size, race, ethnicity, age, religion, or nationality.
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of
+experience, education, socio-economic status, nationality, personal appearance,
+race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
-* The use of sexualized language or imagery
-* Personal attacks
-* Trolling or insulting/derogatory comments
-* Public or private harassment
-* Publishing other's private information,
-such as physical or electronic
-addresses, without explicit permission
-* Other unethical or unprofessional conduct.
+* The use of sexualized language or imagery and unwelcome sexual attention or
+ advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+ address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or reject
-comments, commits, code, wiki edits, issues, and other contributions
-that are not aligned to this Code of Conduct.
-By adopting this Code of Conduct,
-project maintainers commit themselves to fairly and consistently
-applying these principles to every aspect of managing this project.
-Project maintainers who do not follow or enforce the Code of Conduct
-may be permanently removed from the project team.
-
-This code of conduct applies both within project spaces and in public spaces
-when an individual is representing the project or its community.
-
-Instances of abusive, harassing, or otherwise unacceptable behavior
-may be reported by opening an issue
-or contacting one or more of the project maintainers.
-
-This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0,
-available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/)
+comments, commits, code, wiki edits, issues, and other contributions that are
+not aligned to this Code of Conduct, or to ban temporarily or permanently any
+contributor for other behaviors that they deem inappropriate, threatening,
+offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+This Code of Conduct also applies outside the project spaces when the Project
+Steward has a reasonable belief that an individual's behavior may have a
+negative impact on the project or its community.
+
+## Conflict Resolution
+
+We do not believe that all conflict is bad; healthy debate and disagreement
+often yield positive results. However, it is never okay to be disrespectful or
+to engage in behavior that violates the project’s code of conduct.
+
+If you see someone violating the code of conduct, you are encouraged to address
+the behavior directly with those involved. Many issues can be resolved quickly
+and easily, and this gives people more control over the outcome of their
+dispute. If you are unable to resolve the matter for any reason, or if the
+behavior is threatening or harassing, report it. We are dedicated to providing
+an environment where participants feel welcome and safe.
+
+
+Reports should be directed to *googleapis-stewards@google.com*, the
+Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to
+receive and address reported violations of the code of conduct. They will then
+work with a committee consisting of representatives from the Open Source
+Programs Office and the Google Open Source Strategy team. If for any reason you
+are uncomfortable reaching out to the Project Steward, please email
+opensource@google.com.
+
+We will investigate every complaint, but you may not receive a direct response.
+We will use our discretion in determining when and how to follow up on reported
+incidents, which may range from not taking action to permanent expulsion from
+the project and project-sponsored spaces. We will notify the accused of the
+report and provide them an opportunity to discuss it before any action is taken.
+The identity of the reporter will be omitted from the details of the report
+supplied to the accused. In potentially harmful situations, such as ongoing
+harassment or threats to anyone's safety, we may take action without notice.
+
+## Attribution
+
+This Code of Conduct is adapted from the Contributor Covenant, version 1.4,
+available at
+https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
index b4473760..d6456956 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,3 +1,4 @@
+
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
@@ -192,7 +193,7 @@
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
- https://www.apache.org/licenses/LICENSE-2.0
+ http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
diff --git a/README.md b/README.md
index adf13671..62f80b67 100644
--- a/README.md
+++ b/README.md
@@ -9,9 +9,9 @@ C toolchain.
# Currently Published Wheels
-Wheels are currently published for Python 3.7, 3.8, 3.9, 3.10, 3.11 and
-pypy-3.7 for multiple architectures. For information on building your own,
-please view [BUILDING.md](BUILDING.md).
+Wheels are currently published for CPython 3.9, 3.10, 3.11 and 3.12
+for multiple architectures. PyPy 3.9 and 3.10 are also supported for Linux.
+For information on building your own wheels, please see [BUILDING.md](BUILDING.md).
## Linux
@@ -21,18 +21,16 @@ Wheels are published for the following platforms / architectures:
- `manylinux2010` platform, `x86_64` and `1686` architectures
- `manylinux2014` platform, `aarch64` architecture
-### Deprecated
-
-- `manylinux1` platform, `x86_64` architecture wheels will be built until
- 2021-12-31, when the PyPA group drops support for the `manylinux1` image.
- See: https://github.com/pypa/manylinux/issues/994
+### Unsupported Platforms
+- `manylinux1` platform, `x86_64` architecture support has ended.
+  See https://github.com/pypa/manylinux/issues/994.
## Mac OS
-Wheels are published for `x86_64` and `universal2` architectures.
+Wheels are published for `x86_64` and `arm64` architectures.
## Windows
-Wheels are published for `win32` and `win_amd64` architectures.
+Wheels are published for the `win_amd64` architecture.
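As a quick smoke test of a published wheel (this assumes the `google_crc32c.value()` helper, which returns the CRC32C checksum of a bytes object as an integer):

    python3 -m pip install --upgrade google-crc32c
    # Prints a 32-bit checksum, confirming the extension imported correctly.
    python3 -c "import google_crc32c; print(hex(google_crc32c.value(b'hello world')))"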
diff --git a/noxfile.py b/noxfile.py
index c03ba807..c785639d 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -24,7 +24,7 @@
HERE = os.path.dirname(__file__)
-@nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11"])
+@nox.session(python=["3.9", "3.10", "3.11", "3.12"])
def build_libcrc32c(session):
session.env["PY_BIN"] = f"python{session.python}"
session.env["REPO_ROOT"] = HERE
@@ -39,7 +39,7 @@ def build_libcrc32c(session):
raise Exception("Unsupported")
-@nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11"])
+@nox.session(python=["3.9", "3.10", "3.11", "3.12"])
def check(session):
session.install("pytest")
session.install("--no-index", f"--find-links={HERE}/wheels", "google-crc32c")
diff --git a/owlbot.py b/owlbot.py
new file mode 100644
index 00000000..cec3cf40
--- /dev/null
+++ b/owlbot.py
@@ -0,0 +1,51 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This script is used to synthesize generated parts of this library."""
+
+import re
+
+import synthtool as s
+from synthtool import gcp
+from synthtool.languages import python
+
+common = gcp.CommonTemplates()
+
+# ----------------------------------------------------------------------------
+# Add templated files
+# ----------------------------------------------------------------------------
+templated_files = common.py_library(
+ microgenerator=True,
+)
+
+# This is not a GAPIC/gRPC client library; exclude templates that do not apply.
+s.move(
+ templated_files,
+ excludes=[
+ ".github/workflows",
+ ".gitignore",
+ "CONTRIBUTING.rst",
+ "docs/**/*",
+ "MANIFEST.in",
+ "noxfile.py",
+ "README.rst",
+ "scripts/**/*",
+ "setup.py",
+ "setup.cfg",
+ "testing/**/*",
+ ".kokoro/release.sh", # not needed
+ ".kokoro/build.sh", # not needed
+ ],
+)
+
diff --git a/renovate.json b/renovate.json
index 314e71cb..39b2a0ec 100644
--- a/renovate.json
+++ b/renovate.json
@@ -1,8 +1,11 @@
{
"extends": [
- "config:base", ":disableDependencyDashboard"
+ "config:base",
+ "group:all",
+ ":preserveSemverRanges",
+ ":disableDependencyDashboard"
],
- "ignorePaths": [".pre-commit-config.yaml", "scripts/dev-requirements.txt"],
+ "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"],
"pip_requirements": {
"fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
}
diff --git a/scripts/local-linux/build_libcrc32c.sh b/scripts/local-linux/build_libcrc32c.sh
index 6483045f..705f2477 100755
--- a/scripts/local-linux/build_libcrc32c.sh
+++ b/scripts/local-linux/build_libcrc32c.sh
@@ -14,7 +14,7 @@
set -e -x
-PY_BIN=${PY_BIN:-python3.7}
+PY_BIN=${PY_BIN:-python3.9}
REPO_ROOT=${REPO_ROOT:-$(pwd)}
CRC32C_INSTALL_PREFIX=${REPO_ROOT}/usr
diff --git a/scripts/manylinux/build.sh b/scripts/manylinux/build.sh
index 31e18b7b..35fcc6aa 100755
--- a/scripts/manylinux/build.sh
+++ b/scripts/manylinux/build.sh
@@ -21,25 +21,16 @@ MANYLINUX_DIR=$(echo $(cd $(dirname ${0}); pwd))
SCRIPTS_DIR=$(dirname ${MANYLINUX_DIR})
REPO_ROOT=$(dirname ${SCRIPTS_DIR})
+sudo apt-get update
+sudo apt-get install -y python3.9
cd $REPO_ROOT
-git submodule update --init --recursive
+# Mark the directory as safe to avoid the "detected dubious ownership" fatal error
+git config --global --add safe.directory $REPO_ROOT
+git config --global --add safe.directory $REPO_ROOT/google_crc32c
+git submodule update --init --recursive
-# Note: PyPA's support for the 'manylinux1' image ends on 2022-01-01.
-# See: https://github.com/pypa/manylinux/issues/994
-# No 3.1* support for 'manylinux1'.
-if [[ "${BUILD_PYTHON}" != "3.1"* ]]; then
- docker pull quay.io/pypa/manylinux1_x86_64
- docker run \
- --rm \
- --interactive \
- --volume ${REPO_ROOT}:/var/code/python-crc32c/ \
- --env BUILD_PYTHON=${BUILD_PYTHON} \
- quay.io/pypa/manylinux1_x86_64 \
- /var/code/python-crc32c/scripts/manylinux/build_on_centos.sh
-fi
-
-docker pull quay.io/pypa/manylinux2010_x86_64
+docker pull quay.io/pypa/manylinux2010_x86_64
docker run \
--rm \
--interactive \
@@ -66,3 +57,7 @@ docker run \
--env BUILD_PYTHON=${BUILD_PYTHON} \
quay.io/pypa/manylinux2014_aarch64 \
/var/code/python-crc32c/scripts/manylinux/build_on_centos.sh
+
+if [[ "${PUBLISH_WHEELS}" == "true" ]]; then
+ . /${MANYLINUX_DIR}/publish_python_wheel.sh
+fi
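A sketch of driving this script directly; based on the version checks in build_on_centos.sh, `BUILD_PYTHON` takes values such as `3.12`, and `PUBLISH_WHEELS` gates the new publish step:

    # Build manylinux wheels for CPython 3.12 only.
    BUILD_PYTHON=3.12 ./scripts/manylinux/build.sh

    # Build everything and publish (requires the Kokoro keystore secrets to be available).
    PUBLISH_WHEELS=true ./scripts/manylinux/build.sh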
diff --git a/scripts/manylinux/build_on_centos.sh b/scripts/manylinux/build_on_centos.sh
index e6d2871d..2df9c2ca 100755
--- a/scripts/manylinux/build_on_centos.sh
+++ b/scripts/manylinux/build_on_centos.sh
@@ -14,8 +14,7 @@
# limitations under the License.
set -e -x
-
-MAIN_PYTHON_BIN="/opt/python/cp37-cp37m/bin"
+MAIN_PYTHON_BIN="/opt/python/cp39-cp39/bin/"
echo "BUILD_PYTHON: ${BUILD_PYTHON}"
REPO_ROOT=/var/code/python-crc32c/
@@ -45,16 +44,16 @@ if [[ -z ${BUILD_PYTHON} ]]; then
# Collect all target Python versions.
for PYTHON_BIN in /opt/python/*/bin; do
# H/T: https://stackoverflow.com/a/229606/1068170
- if [[ "${PYTHON_BIN}" == *"37"* ]]; then
+ if [[ "${PYTHON_BIN}" == *"39"* ]]; then
PYTHON_VERSIONS="${PYTHON_VERSIONS} ${PYTHON_BIN}"
continue
- elif [[ "${PYTHON_BIN}" == *"38"* ]]; then
+ elif [[ "${PYTHON_BIN}" == *"310"* ]]; then
PYTHON_VERSIONS="${PYTHON_VERSIONS} ${PYTHON_BIN}"
continue
- elif [[ "${PYTHON_BIN}" == *"39"* ]]; then
+ elif [[ "${PYTHON_BIN}" == *"311"* ]]; then
PYTHON_VERSIONS="${PYTHON_VERSIONS} ${PYTHON_BIN}"
continue
- elif [[ "${PYTHON_BIN}" == *"310"* ]]; then
+ elif [[ "${PYTHON_BIN}" == *"312"* ]]; then
PYTHON_VERSIONS="${PYTHON_VERSIONS} ${PYTHON_BIN}"
continue
else
diff --git a/scripts/manylinux/check.sh b/scripts/manylinux/check.sh
index a069ff9d..f5f38150 100755
--- a/scripts/manylinux/check.sh
+++ b/scripts/manylinux/check.sh
@@ -20,25 +20,41 @@ echo "CHECKING ON LINUX"
VERSION=$(awk "/version \= ([0-9.]+)/" setup.cfg)
PACKAGE_VERSION=${VERSION:10}
-WHEEL_FILE="wheels/google_crc32c-${PACKAGE_VERSION}-cp36-cp36m-manylinux1_x86_64.whl"
-PYTHON=python3.7
-# Using pyenv, set 3.7.13 as a local python version.
-# pyenv versions
-pyenv local 3.7.13
-
-# Make sure we can create a virtual environment.
-${PYTHON} -m pip install --upgrade setuptools pip wheel
-
-# Create a virtual environment.
-${PYTHON} -m venv venv
-
-# Install the wheel.
-venv/bin/pip install ${WHEEL_FILE}
-
-# Verify that the module is installed and peek at contents.
-venv/bin/python scripts/check_crc32c_extension.py
-unzip -l ${WHEEL_FILE}
-
-# Clean up.
-rm -fr venv/
+# set up pyenv & shell environment for switching across python versions
+eval "$(pyenv init -)"
+eval "$(pyenv init --path)"
+install_python_pyenv() {
+ version=$1
+
+ if [ -z "$(pyenv versions --bare | grep $version)" ]; then
+ echo "Python $version is not installed. Installing..."
+ pyenv install $version
+ echo "Python $version installed."
+ else
+ echo "Python $version is already installed."
+ fi
+ pyenv shell $version
+}
+
+SUPPORTED_PYTHON_VERSIONS=("3.9" "3.10" "3.11" "3.12")
+
+for PYTHON_VERSION in "${SUPPORTED_PYTHON_VERSIONS[@]}"; do
+ PYTHON=python${PYTHON_VERSION}
+ install_python_pyenv ${PYTHON_VERSION}
+ ${PYTHON} -m pip install --upgrade setuptools pip wheel
+
+ # Create a virtual environment.
+ ${PYTHON} -m venv venv
+
+ # Install the wheel.
+ WHEEL_FILE="wheels/google_crc32c-${PACKAGE_VERSION}-cp${PYTHON_VERSION//.}-cp${PYTHON_VERSION//.}-manylinux_2_17_x86_64.manylinux2014_x86_64.whl"
+ venv/bin/pip install ${WHEEL_FILE}
+
+ # Verify that the module is installed and peek at contents.
+ venv/bin/python scripts/check_crc32c_extension.py
+ unzip -l ${WHEEL_FILE}
+
+ # Clean up.
+ rm -fr venv/
+done
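The wheel filename above is derived with bash parameter expansion, stripping the dot from the version string; for example:

    PYTHON_VERSION=3.12
    echo "cp${PYTHON_VERSION//.}-cp${PYTHON_VERSION//.}"
    # -> cp312-cp312, so the matching wheel is
    #    google_crc32c-<version>-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl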
diff --git a/scripts/manylinux/publish_python_wheel.sh b/scripts/manylinux/publish_python_wheel.sh
new file mode 100755
index 00000000..ccc55a4c
--- /dev/null
+++ b/scripts/manylinux/publish_python_wheel.sh
@@ -0,0 +1,29 @@
+#!/bin/bash
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+python3.9 -m pip install "setuptools<71"
+
+# Start the releasetool reporter
+python3.9 -m pip install --require-hashes -r ${REPO_ROOT}/.kokoro/requirements.txt
+python3.9 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Move into the package, build the distribution and upload.
+TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1")
+python3.9 -m twine upload --skip-existing --username __token__ --password "${TWINE_PASSWORD}" ${REPO_ROOT}/wheels/*
diff --git a/scripts/osx/build.sh b/scripts/osx/build.sh
index bccfb838..326ba92d 100755
--- a/scripts/osx/build.sh
+++ b/scripts/osx/build.sh
@@ -16,6 +16,9 @@
set -e -x
echo "BUILDING FOR OSX"
+# set deployment target
+export MACOSX_DEPLOYMENT_TARGET=12
+
# ``readlink -f`` is not our friend on OS X. This relies on **some**
# ``python`` being installed.
SCRIPT_FI=$(python -c "import os; print(os.path.realpath('${0}'))")
@@ -23,47 +26,30 @@ OSX_DIR=$(dirname ${SCRIPT_FI})
SCRIPTS_DIR=$(dirname ${OSX_DIR})
export REPO_ROOT=$(dirname ${SCRIPTS_DIR})
-# NOTE: These are the Python.org versions of Python.
-PYTHON37="/Library/Frameworks/Python.framework/Versions/3.7/bin"
-PYTHON38="/Library/Frameworks/Python.framework/Versions/3.8/bin"
-PYTHON39="/Library/Frameworks/Python.framework/Versions/3.9/bin"
-PYTHON310="/Library/Frameworks/Python.framework/Versions/3.10/bin"
-PYTHON311="/Library/Frameworks/Python.framework/Versions/3.11/bin"
+# install required packages for pyenv
+# https://github.com/pyenv/pyenv/wiki#suggested-build-environment
+brew install openssl readline sqlite3 xz zlib tcl-tk
# Build and install `libcrc32c`
export PY_BIN="${PY_BIN:-python3}"
export CRC32C_INSTALL_PREFIX="${REPO_ROOT}/usr"
cd ${REPO_ROOT}
+# Mark the directory as safe to avoid the "detected dubious ownership" fatal error
+git config --global --add safe.directory $REPO_ROOT
+git config --global --add safe.directory $REPO_ROOT/google_crc32c
git submodule update --init --recursive
${OSX_DIR}/build_c_lib.sh
-# Build wheel for Python 3.7.
-export PY_BIN="python3.7"
-export PY_TAG="cp37-cp37m"
-${OSX_DIR}/build_python_wheel.sh
-
-# Build wheel for Python 3.8.
-# Note that the 'm' SOABI flag is no longer supported for Python >= 3.8
-export PY_BIN="python3.8"
-export PY_TAG="cp38-cp38"
-${OSX_DIR}/build_python_wheel.sh
-
-# Build wheel for Python 3.9.
-export PY_BIN="python3.9"
-export PY_TAG="cp39-cp39"
-${OSX_DIR}/build_python_wheel.sh
-
-# Build wheel for Python 3.10.
-export PY_BIN="python3.10"
-export PY_TAG="cp310-cp310"
-${OSX_DIR}/build_python_wheel.sh
+SUPPORTED_PYTHON_VERSIONS=("3.9" "3.10" "3.11" "3.12")
-# Build wheel for Python 3.11.
-export PY_BIN="python3.11"
-export PY_TAG="cp311-cp311"
-${OSX_DIR}/build_python_wheel.sh
+for PYTHON_VERSION in "${SUPPORTED_PYTHON_VERSIONS[@]}"; do
+ echo "Build wheel for Python ${PYTHON_VERSION}"
+ export PY_BIN=$PYTHON_VERSION
+ export PY_TAG="cp${PYTHON_VERSION//.}-cp${PYTHON_VERSION//.}"
+  . ${OSX_DIR}/build_python_wheel.sh
+done
# Clean up.
rm -fr ${CRC32C_INSTALL_PREFIX}
diff --git a/scripts/osx/build_c_lib.sh b/scripts/osx/build_c_lib.sh
index 6a44322f..99b8bba7 100755
--- a/scripts/osx/build_c_lib.sh
+++ b/scripts/osx/build_c_lib.sh
@@ -54,23 +54,25 @@ ${VENV}/bin/python -m pip install "cmake >= 3.12.0"
# Build `libcrc32c`
cd ${REPO_ROOT}/google_crc32c
mkdir -p build
+cd build
ls
# We don't build i386 anymore as XCode no longer supports.
${VENV}/bin/cmake \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_OSX_DEPLOYMENT_TARGET=10.9 \
- -DCMAKE_OSX_ARCHITECTURES="x86_64" \
+ -DCMAKE_OSX_ARCHITECTURES="x86_64;arm64" \
-DCRC32C_BUILD_TESTS=no \
-DCRC32C_BUILD_BENCHMARKS=no \
-DBUILD_SHARED_LIBS=yes \
-DCMAKE_INSTALL_PREFIX:PATH=${CRC32C_INSTALL_PREFIX} \
-DCMAKE_INSTALL_NAME_DIR:PATH=${CRC32C_INSTALL_PREFIX}/lib \
- .
+ ..
# Install `libcrc32c` into CRC32C_INSTALL_PREFIX.
make all install
# Clean up.
+cd ..
rm -fr ${REPO_ROOT}/google_crc32c/build
rm -fr ${VENV}
diff --git a/scripts/osx/build_gh_action.sh b/scripts/osx/build_gh_action.sh
deleted file mode 100755
index 8b3bc908..00000000
--- a/scripts/osx/build_gh_action.sh
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/bin/bash
-# Copyright 2018 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -e -x
-echo "BUILDING FOR OSX"
-
-# ``readlink -f`` is not our friend on OS X. This relies on **some**
-# ``python`` being installed.
-SCRIPT_FI=$(python -c "import os; print(os.path.realpath('${0}'))")
-OSX_DIR=$(dirname ${SCRIPT_FI})
-SCRIPTS_DIR=$(dirname ${OSX_DIR})
-export REPO_ROOT=$(dirname ${SCRIPTS_DIR})
-
-# Build and install `libcrc32c`
-export PY_BIN="python3"
-export CRC32C_INSTALL_PREFIX="${REPO_ROOT}/usr"
-
-cd ${REPO_ROOT}
-git submodule update --init --recursive
-
-${OSX_DIR}/build_c_lib.sh
-
-# Build wheel
-export PY_BIN="python3"
-export PY_TAG="*"
-${OSX_DIR}/build_python_wheel.sh
-
-
-# Clean up.
-rm -fr ${CRC32C_INSTALL_PREFIX}
diff --git a/scripts/osx/build_python_wheel.sh b/scripts/osx/build_python_wheel.sh
index 8865c771..b00a8308 100755
--- a/scripts/osx/build_python_wheel.sh
+++ b/scripts/osx/build_python_wheel.sh
@@ -33,9 +33,28 @@ if [[ -z "${PY_TAG}" ]]; then
exit 1
fi
+# set up pyenv & shell environment for switching across python versions
+eval "$(pyenv init -)"
+eval "$(pyenv init --path)"
+
+install_python_pyenv() {
+ version=$1
+
+ if [ -z "$(pyenv versions --bare | grep $version)" ]; then
+ echo "Python $version is not installed. Installing..."
+ pyenv install $version
+ echo "Python $version installed."
+ else
+ echo "Python $version is already installed."
+ fi
+ pyenv shell $version
+}
+install_python_pyenv ${PY_BIN}
+
# Create a virtualenv where we can install Python build dependencies.
VENV=${REPO_ROOT}/venv${PY_BIN}
-${PY_BIN} -m venv ${VENV}
+"python${PY_BIN}" -m venv ${VENV}
+
curl https://bootstrap.pypa.io/get-pip.py | ${VENV}/bin/python
${VENV}/bin/python -m pip install \
--requirement ${REPO_ROOT}/scripts/dev-requirements.txt
@@ -50,14 +69,19 @@ ${VENV}/bin/python setup.py build_ext \
--rpath=${REPO_ROOT}/usr/lib
${VENV}/bin/python -m pip wheel ${REPO_ROOT} --wheel-dir ${DIST_WHEELS}
-# Delocate the wheel. removed --check-archs. We don't build i386.
+# Delocate the wheel.
FIXED_WHEELS="${REPO_ROOT}/wheels"
mkdir -p ${FIXED_WHEELS}
${VENV}/bin/delocate-wheel \
--wheel-dir ${FIXED_WHEELS} \
--verbose \
+ --check-archs \
${DIST_WHEELS}/google_crc32c*${PY_TAG}*.whl
+if [[ "${PUBLISH_WHEELS}" == "true" ]]; then
+  . ${OSX_DIR}/publish_python_wheel.sh
+fi
+
# Clean up.
rm -fr ${DIST_WHEELS}
rm -fr ${VENV}
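
With this change, build_python_wheel.sh expects PY_BIN to carry a bare pyenv version (for example 3.11) that install_python_pyenv installs and activates, PY_TAG to carry the matching cpXY tag, and PUBLISH_WHEELS to optionally trigger the publish step. A rough invocation sketch, assuming pyenv is installed and the C library has already been built under ${REPO_ROOT}/usr by build_c_lib.sh:

#!/bin/bash
# Sketch: drive build_python_wheel.sh for a single Python version via pyenv.
export REPO_ROOT="$(pwd)"                     # assumed: run from the repo checkout
export OSX_DIR="${REPO_ROOT}/scripts/osx"
export PY_BIN="3.11"                          # pyenv version, not an executable name
export PY_TAG="cp${PY_BIN//.}-cp${PY_BIN//.}"
export PUBLISH_WHEELS="false"                 # "true" would source publish_python_wheel.sh
. "${OSX_DIR}/build_python_wheel.sh"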
diff --git a/scripts/osx/check.sh b/scripts/osx/check.sh
index 10d7b270..7ec869c8 100755
--- a/scripts/osx/check.sh
+++ b/scripts/osx/check.sh
@@ -25,63 +25,39 @@ OSX_DIR=$(dirname ${SCRIPT_FI})
SCRIPTS_DIR=$(dirname ${OSX_DIR})
export REPO_ROOT=$(dirname ${SCRIPTS_DIR})
+# set up pyenv & shell environment for switching across python versions
+eval "$(pyenv init -)"
+eval "$(pyenv init --path)"
ls ${REPO_ROOT}/wheels
-# NOTE: These are the Python.org versions of Python.
-PYTHON37="/Library/Frameworks/Python.framework/Versions/3.7/bin"
-PYTHON38="/Library/Frameworks/Python.framework/Versions/3.8/bin"
-PYTHON39="/Library/Frameworks/Python.framework/Versions/3.9/bin"
-PYTHON310="/Library/Frameworks/Python.framework/Versions/3.10/bin"
-PYTHON311="/Library/Frameworks/Python.framework/Versions/3.11/bin"
-
-# Make sure we have an updated `pip`.
-curl https://bootstrap.pypa.io/get-pip.py | ${PYTHON37}/python3
-
-# Make sure virtualenv and delocate.
-${PYTHON37}/python3 -m pip install --upgrade delocate
-LISTDEPS_CMD="${PYTHON37}/delocate-listdeps --all --depending"
-VIRTUALENV_CMD="${PYTHON37}/python3 -m venv"
-
-${PYTHON37}/python3 -m venv venv37
-curl https://bootstrap.pypa.io/get-pip.py | venv37/bin/python3
-WHL=${REPO_ROOT}/wheels/google_crc32c-${PACKAGE_VERSION}-cp37-cp37m-macosx_10_9_x86_64.whl
-venv37/bin/pip install ${WHL}
-venv37/bin/pip install pytest
-venv37/bin/py.test ${REPO_ROOT}/tests
-venv37/bin/python ${REPO_ROOT}/scripts/check_crc32c_extension.py
-${LISTDEPS_CMD} ${WHL}
-rm -fr venv37
-
-${PYTHON38}/python3 -m venv venv38
-curl https://bootstrap.pypa.io/get-pip.py | venv38/bin/python3
-WHL=${REPO_ROOT}/wheels/google_crc32c-${PACKAGE_VERSION}-cp38-cp38-macosx_10_9_x86_64.whl
-venv38/bin/pip install ${WHL}
-venv38/bin/pip install pytest
-venv38/bin/py.test ${REPO_ROOT}/tests
-venv38/bin/python ${REPO_ROOT}/scripts/check_crc32c_extension.py
-${LISTDEPS_CMD} ${WHL}
-rm -fr venv38
-
-${PYTHON39}/python3 -m venv venv39
-curl https://bootstrap.pypa.io/get-pip.py | venv39/bin/python3
-WHL=${REPO_ROOT}/wheels/google_crc32c-${PACKAGE_VERSION}-cp39-cp39-macosx_10_9_x86_64.whl
-venv39/bin/pip install ${WHL}
-venv39/bin/python ${REPO_ROOT}/scripts/check_crc32c_extension.py
-${LISTDEPS_CMD} ${WHL}
-rm -fr venv39
-
-${PYTHON310}/python3 -m venv venv310
-curl https://bootstrap.pypa.io/get-pip.py | venv310/bin/python3
-WHL=${REPO_ROOT}/wheels/google_crc32c-${PACKAGE_VERSION}-cp310-cp310-macosx_10_9_x86_64.whl
-venv310/bin/pip install ${WHL}
-venv310/bin/python ${REPO_ROOT}/scripts/check_crc32c_extension.py
-${LISTDEPS_CMD} ${WHL}
-rm -fr venv310
-
-${PYTHON311}/python3 -m venv venv311
-curl https://bootstrap.pypa.io/get-pip.py | venv311/bin/python3
-WHL=${REPO_ROOT}/wheels/google_crc32c-${PACKAGE_VERSION}-cp311-cp311-macosx_10_9_x86_64.whl
-venv311/bin/pip install ${WHL}
-venv311/bin/python ${REPO_ROOT}/scripts/check_crc32c_extension.py
-${LISTDEPS_CMD} ${WHL}
-rm -fr venv311
+SUPPORTED_PYTHON_VERSIONS=("3.9" "3.10" "3.11" "3.12")
+
+for PYTHON_VERSION in "${SUPPORTED_PYTHON_VERSIONS[@]}"; do
+ PYTHON="python${PYTHON_VERSION}"
+ pyenv shell $PYTHON_VERSION
+ VIRTUALENV="venv${PYTHON_VERSION//.}"
+ # Make sure we have an updated `pip`.
+ curl https://bootstrap.pypa.io/get-pip.py | ${PYTHON}
+  # Make sure virtualenv and delocate are installed.
+ ${PYTHON} -m pip install --upgrade delocate
+  LISTDEPS_CMD="$(dirname $(pyenv which python))/delocate-listdeps --all --depending"
+ ${PYTHON} -m venv ${VIRTUALENV}
+
+ OS_VERSION_STR="12_0"
+ if [ "${PYTHON_VERSION}" == "3.12" ]; then
+    OS_VERSION_STR="14_0"
+ fi
+
+ #WHL=${REPO_ROOT}/wheels/google_crc32c-${PACKAGE_VERSION}-cp${PYTHON_VERSION//.}-cp${PYTHON_VERSION//.}-macosx_${OS_VERSION_STR}_x86_64.whl
+ #${VIRTUALENV}/bin/pip install ${WHL} --force-reinstall
+
+  # Alternative install method: locate the wheel via --find-links without asserting the expected macOS platform tag.
+ ${VIRTUALENV}/bin/pip install --no-index --find-links=${REPO_ROOT}/wheels google-crc32c --force-reinstall
+
+ ${VIRTUALENV}/bin/pip install pytest
+ ${VIRTUALENV}/bin/py.test ${REPO_ROOT}/tests
+ ${VIRTUALENV}/bin/python ${REPO_ROOT}/scripts/check_crc32c_extension.py
+ #${LISTDEPS_CMD} ${WHL}
+ rm -fr ${VIRTUALENV}
+
+done
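
Instead of hard-coding the macOS platform tag (the commented-out WHL path above), the loop now lets pip pick the matching wheel from the local wheels/ directory using its offline flags. A small standalone version of that smoke test, with illustrative paths:

#!/bin/bash
# Sketch: install a locally built wheel without contacting PyPI, then import-check it.
set -e
REPO_ROOT="$(pwd)"                 # assumed: run from the repo checkout with wheels/ populated
python3 -m venv /tmp/crc32c-check
/tmp/crc32c-check/bin/pip install --no-index --find-links="${REPO_ROOT}/wheels" \
    google-crc32c --force-reinstall
/tmp/crc32c-check/bin/pip install pytest
/tmp/crc32c-check/bin/python -m pytest "${REPO_ROOT}/tests"
/tmp/crc32c-check/bin/python "${REPO_ROOT}/scripts/check_crc32c_extension.py"
rm -rf /tmp/crc32c-check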
diff --git a/scripts/osx/publish_python_wheel.sh b/scripts/osx/publish_python_wheel.sh
new file mode 100755
index 00000000..676246d6
--- /dev/null
+++ b/scripts/osx/publish_python_wheel.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+PYTHON=$(PYENV_VERSION=3.9 pyenv which python)
+PYTHON_BIN=$(dirname ${PYTHON})
+
+# Start the releasetool reporter
+${PYTHON} -m pip install --require-hashes -r ${REPO_ROOT}/.kokoro/requirements.txt
+${PYTHON} -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script
+
+TWINE=${PYTHON_BIN}/twine
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1")
+${PYTHON} -m twine upload --skip-existing --username __token__ --password "${TWINE_PASSWORD}" ${REPO_ROOT}/wheels/*
diff --git a/scripts/requirements.in b/scripts/requirements.in
new file mode 100644
index 00000000..0508fa5e
--- /dev/null
+++ b/scripts/requirements.in
@@ -0,0 +1,4 @@
+setuptools
+pip
+wheel
+cmake
\ No newline at end of file
diff --git a/scripts/requirements.txt b/scripts/requirements.txt
new file mode 100644
index 00000000..4401a449
--- /dev/null
+++ b/scripts/requirements.txt
@@ -0,0 +1,39 @@
+#
+# This file is autogenerated by pip-compile with python 3.10
+# To update, run:
+#
+# pip-compile --allow-unsafe --generate-hashes requirements.in
+#
+cmake==3.30.2 \
+ --hash=sha256:02657456811a8dd95d07728e0878ee6e09d0466de84319513af99490beefe753 \
+ --hash=sha256:27f9f8b8a442d489cf4ae5dae1e475430979e9fcfbaef5fd0810aac4206548a9 \
+ --hash=sha256:33e6ce72a4865b25f7f61e40e634382a7cfbe494785ce3c0aa94475b5e0f9252 \
+ --hash=sha256:48e0998068d785d3b78cc60a7e3d38fd71cfb942ce0a72ee046de38ec2bd6523 \
+ --hash=sha256:54dba98c12c6b77bd86b452b71c7f7f3b04027081f3451e18cdf2d926e4695e5 \
+ --hash=sha256:6021b64ec6ddf43ff5887e510f897468a281f1822feedf48edf86926288a48a7 \
+ --hash=sha256:709568c04f60f6dbee846a997a774ae28e12e809b4cbb12b6cd00767c111ede0 \
+ --hash=sha256:806d3295c75ae4f9bd53100b4cdd17c00b41e1606a6cff6faf1f599fdace7d71 \
+ --hash=sha256:894214560a6d70af6e88d8506f41f6911c31757a7799eced3322ca1fc4c76e4a \
+ --hash=sha256:8d0b29a92cf31c54ab21040e84a78420abbf59cdd73c30d05e0e70229b2e9348 \
+ --hash=sha256:977acfb5256e29bc9e14004a2350a7f8bb6a2c5d42ad74183fb64275b99af2fa \
+ --hash=sha256:9beca135489d56a89cf54cf3d324bcf8dd6c50cc9bdb76b9a97e8540935797b2 \
+ --hash=sha256:b58b658f3ea47b1b6deb8fab817bf68a9c7f00fde514ad344b9cef16d684206a \
+ --hash=sha256:e706eded76e0e8b09d2cf09cbab0a930f6550084a85950982b7c53f6a79a0451 \
+ --hash=sha256:ee421b9531279b88e54c4bd3ff41c538d1f563e75b1340a8eb2569c38949d230 \
+ --hash=sha256:fe4f48bb96fadef92697ccaeb98b22ff36b3add80e45ce48e0c67462846fd242 \
+ --hash=sha256:fef0b8a18a6376ff430e86bedcccb4018b344295a17584aaebf1b7339c92f505
+ # via -r requirements.in
+wheel==0.44.0 \
+ --hash=sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f \
+ --hash=sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49
+ # via -r requirements.in
+
+# The following packages are considered to be unsafe in a requirements file:
+pip==24.2 \
+ --hash=sha256:2cd581cf58ab7fcfca4ce8efa6dcacd0de5bf8d0a3eb9ec927e07405f4d9e2a2 \
+ --hash=sha256:5b5e490b5e9cb275c879595064adce9ebd31b854e3e803740b72f9ccf34a45b8
+ # via -r requirements.in
+setuptools==73.0.1 \
+ --hash=sha256:b208925fcb9f7af924ed2dc04708ea89791e24bde0d3020b27df0e116088b34e \
+ --hash=sha256:d59a3e788ab7e012ab2c4baed1b376da6366883ee20d7a5fc426816e3d7b1193
+ # via -r requirements.in
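
The banner at the top of this file records how it is produced; assuming pip-tools is installed for the interpreter noted there, refreshing the pins would look roughly like this:

#!/bin/bash
# Sketch: regenerate scripts/requirements.txt from scripts/requirements.in with hashes.
cd scripts
python3.10 -m pip install pip-tools
pip-compile --allow-unsafe --generate-hashes requirements.in   # writes requirements.txt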
diff --git a/scripts/windows/build.bat b/scripts/windows/build.bat
index d58b783c..591834a6 100644
--- a/scripts/windows/build.bat
+++ b/scripts/windows/build.bat
@@ -15,40 +15,35 @@
setlocal ENABLEDELAYEDEXPANSION
-set CMAKE_GENERATOR="Visual Studio 16 2019"
+set CMAKE_GENERATOR="Visual Studio 17 2022"
set CONFIGURATION=RelWithDebInfo
set CRC32C_INSTALL_PREFIX=%cd%\build\%CONFIGURATION%
-@rem Path to cmake, env var to make it easier to point to a specific version
-set cmake=cmake
+@rem Iterate through supported Python versions.
+@rem Unfortunately pyenv for Windows has an out-of-date versions list. Choco's
+@rem installer seems to have some problems with installing multiple versions at
+@rem once, so as a workaround, we will install and then uninstall every version.
+FOR %%P IN (3.9, 3.10, 3.11, 3.12) DO (
-@rem python version should be set as an argument, if not, default to python 3.9
-set PYTHON_VERSION=%1
-if "%PYTHON_VERSION%"=="" (
- echo "Python version was not provided, using Python 3.9"
- set PYTHON_VERSION=3.9
+ echo "Installing Python version %%P"
+ choco install python --version=%%P -y --no-progress
-)
-
-if "%PYTHON_VERSION%"=="3.10-dev" (
- set PYTHON_VERSION=3.10
-)
-
-py -0
-py -%PYTHON_VERSION% -m pip install cmake
+  echo "Listing available Python versions"
+ py -0
-git submodule update --init --recursive
+ py -%%P-64 -m pip install --upgrade pip
-FOR %%V IN (32,64) DO (
- set TARGET_PLATFORM="x64"
+ echo "Installing cmake for Python %%P"
+ py -%%P-64 -m pip install cmake
- if "%%V"=="32" (
- set TARGET_PLATFORM="Win32"
- )
- echo "Target Platform: !TARGET_PLATFORM!"
+ @rem Add directory as safe to avoid "detected dubious ownership" fatal issue
+ git config --global --add safe.directory %cd%
+ git config --global --add safe.directory C:/tmpfs/src/github/python-crc32c
+ git submodule update --init --recursive
+ git config --global --add safe.directory %cd%\google_crc32c
+ git config --global --add safe.directory C:/tmpfs/src/github/python-crc32c/google_crc32c
pushd google_crc32c
-
@rem reset hard to cleanup any changes done by a previous build.
git reset --hard
git clean -fxd
@@ -59,11 +54,11 @@ FOR %%V IN (32,64) DO (
mkdir build
cd build
- echo "Running cmake with Generator: %CMAKE_GENERATOR%, Platform: !TARGET_PLATFORM!, Install Prefix: %CRC32C_INSTALL_PREFIX%"
+ echo "Running cmake with Generator: %CMAKE_GENERATOR%, Platform: x64, Install Prefix: %CRC32C_INSTALL_PREFIX%"
- %cmake% -G %CMAKE_GENERATOR% -A !TARGET_PLATFORM! -DCMAKE_BUILD_TYPE=Release -DCRC32C_BUILD_BENCHMARKS=no -DCRC32C_BUILD_TESTS=no -DBUILD_SHARED_LIBS=yes -DCMAKE_WINDOWS_EXPORT_ALL_SYMBOLS=yes -DCRC32C_USE_GLOG=0 -DCMAKE_INSTALL_PREFIX:PATH=%CRC32C_INSTALL_PREFIX% ..
+ py -%%P-64 -m cmake -G %CMAKE_GENERATOR% -A x64 -DCRC32C_BUILD_BENCHMARKS=no -DCRC32C_BUILD_TESTS=no -DBUILD_SHARED_LIBS=yes -DCMAKE_WINDOWS_EXPORT_ALL_SYMBOLS=yes -DCRC32C_USE_GLOG=0 -DCMAKE_INSTALL_PREFIX:PATH=%CRC32C_INSTALL_PREFIX% ..
- %cmake% --build . --config "%CONFIGURATION%" --target install
+ py -%%P-64 -m cmake --build . --config "%CONFIGURATION%" --target install
dir %CRC32C_INSTALL_PREFIX% /b /s
popd
@@ -72,9 +67,15 @@ FOR %%V IN (32,64) DO (
echo "Copying Binary to root: %CRC32C_INSTALL_PREFIX%\bin\crc32c.dll"
copy %CRC32C_INSTALL_PREFIX%\bin\crc32c.dll .
- py -%PYTHON_VERSION%-%%V -m pip install --upgrade pip setuptools wheel
+ py -%%P-64 -m pip install --upgrade pip setuptools wheel
echo "Building C extension"
- py -%PYTHON_VERSION%-%%V setup.py build_ext --include-dirs=%CRC32C_INSTALL_PREFIX%\include --library-dirs=%CRC32C_INSTALL_PREFIX%\lib
+ py -%%P-64 setup.py build_ext -v --include-dirs=%CRC32C_INSTALL_PREFIX%\include --library-dirs=%CRC32C_INSTALL_PREFIX%\lib
echo "Building Wheel"
- py -%PYTHON_VERSION%-%%V -m pip wheel . --wheel-dir wheels/
+ py -%%P-64 -m pip wheel . --wheel-dir wheels/
+
+ echo "Built wheel, now running tests."
+ call %~dp0/test.bat %%P || goto :error
+
+ echo "Finished with Python version %%P, now uninstalling"
+ choco uninstall python -y
)
diff --git a/scripts/windows/test.bat b/scripts/windows/test.bat
index dc169a5c..5e9d1280 100644
--- a/scripts/windows/test.bat
+++ b/scripts/windows/test.bat
@@ -12,20 +12,18 @@
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
-@rem python version should be set as an argument, if not, default to python 3.9
+@rem This test file runs for one Python version at a time, and is intended to
+@rem be called from within the build loop.
+
set PYTHON_VERSION=%1
if "%PYTHON_VERSION%"=="" (
- echo "Python version was not provided, using Python 3.9"
- set PYTHON_VERSION=3.9
+ echo "Python version was not provided, using Python 3.10"
+ set PYTHON_VERSION=3.10
)
-@rem update python deps and build wheels (requires CRC32C_INSTALL_PREFIX is set)
-@REM FOR %%V IN (3.9-64,3.9-32) DO (
-FOR %%V IN (%PYTHON_VERSION%-32, %PYTHON_VERSION%-64) DO (
- py -%%V -m pip install --no-index --find-links=wheels google-crc32c --force-reinstall
+py -%PYTHON_VERSION%-64 -m pip install --no-index --find-links=wheels google-crc32c --force-reinstall
- py -%%V ./scripts/check_crc32c_extension.py
+py -%PYTHON_VERSION%-64 ./scripts/check_crc32c_extension.py
- py -%%V -m pip install pytest
- py -%%V -m pytest tests
-)
+py -%PYTHON_VERSION%-64 -m pip install pytest
+py -%PYTHON_VERSION%-64 -m pytest tests
diff --git a/setup.cfg b/setup.cfg
index 4882f93e..2d2b3272 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -14,7 +14,7 @@
[metadata]
name = google-crc32c
-version = 1.5.0
+version = 1.6.0
description = A python wrapper of the C library 'Google CRC32C'
url = https://github.com/googleapis/python-crc32c
long_description = file: README.md
@@ -30,15 +30,14 @@ classifiers =
License :: OSI Approved :: Apache Software License
Operating System :: OS Independent
Programming Language :: Python :: 3
- Programming Language :: Python :: 3.7
- Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
Programming Language :: Python :: 3.10
Programming Language :: Python :: 3.11
+ Programming Language :: Python :: 3.12
[options]
zip_safe = True
-python_requires = >=3.7
+python_requires = >=3.9
[options.extras_require]
testing = pytest
diff --git a/setup.py b/setup.py
index 463498ae..5c304b20 100644
--- a/setup.py
+++ b/setup.py
@@ -29,7 +29,6 @@
def copy_dll(build_lib):
- return # trying static build of C library.
install_prefix = os.environ.get("CRC32C_INSTALL_PREFIX")
if os.name == "nt" and install_prefix is not None:
@@ -107,6 +106,7 @@ def build_c_extension():
package_dir={"": "src"},
ext_modules=[module],
cmdclass={"build_ext": BuildExtWithDLL},
+ install_requires=["importlib_resources>=1.3 ; python_version < '3.9' and os_name == 'nt'"],
)
diff --git a/src/google_crc32c/__config__.py b/src/google_crc32c/__config__.py
index 2bcf1227..f407db15 100644
--- a/src/google_crc32c/__config__.py
+++ b/src/google_crc32c/__config__.py
@@ -13,9 +13,6 @@
# limitations under the License.
import os
-import sys
-
-import pkg_resources
def modify_path():
@@ -29,13 +26,12 @@ def modify_path():
return
try:
- extra_dll_dir = pkg_resources.resource_filename("google_crc32c", "extra-dll")
+ from importlib.resources import files as _resources_files
+
+ extra_dll_dir = str(_resources_files("google_crc32c") / "extra-dll")
if os.path.isdir(extra_dll_dir):
- # Python 3.6, 3.7 use path
- os.environ["PATH"] = path + os.pathsep + extra_dll_dir
# Python 3.8+ uses add_dll_directory.
- if sys.version_info[0] == 3 and sys.version_info[1] >= 8:
- os.add_dll_directory(extra_dll_dir)
+ os.add_dll_directory(extra_dll_dir)
except ImportError:
pass
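
The rewritten modify_path resolves the bundled DLL directory through the stdlib importlib.resources API (its files() helper is available on every supported interpreter now that 3.9 is the floor) rather than pkg_resources. Assuming a built google_crc32c is importable, the lookup itself can be exercised from a shell; the extra-dll directory only exists inside Windows wheels:

#!/bin/bash
# Sketch: show where modify_path would look for bundled DLLs on this interpreter.
python3 -c "from importlib.resources import files; print(files('google_crc32c') / 'extra-dll')"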