| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
|
|
| """This script is used to synthesize generated parts of this library.""" |
|
|
| import re |
|
|
| import synthtool as s |
| import synthtool.gcp as gcp |
| from synthtool.languages import python |
|
|
# Provides the shared repo templates (.kokoro, noxfile, etc.) applied below.
common = gcp.CommonTemplates()

# Staging directories without an explicit version suffix are treated as "v1".
default_version = "v1"

# Set to True once any staged generator output is merged; the templating and
# post-processing steps further down only run in that case.
has_generator_updates = False
# Patch the generated GAPIC code and tests for each staged API version, then
# merge the staging output into the repo (minus hand-maintained files).
for library in s.get_staging_dirs(default_version):
    # `request.instances` is a repeated proto field and cannot be assigned
    # directly from the flattened argument; extend() appends instead.
    s.replace(
        library
        / f"google/cloud/aiplatform_{library.name}/services/prediction_service/client.py",
        "request.instances = instances",
        "request.instances.extend(instances)",
    )

    # Drop the generated flattened-call test for predict: it asserts the
    # direct-assignment behavior that was patched out above.
    s.replace(
        library
        / f"tests/unit/gapic/aiplatform_{library.name}/test_prediction_service.py",
        """def test_predict_flattened.*?def test_predict_flattened_error""",
        "def test_predict_flattened_error",
        flags=re.MULTILINE | re.DOTALL,
    )

    # Same removal for the explain flattened-call test.
    s.replace(
        library
        / f"tests/unit/gapic/aiplatform_{library.name}/test_prediction_service.py",
        """def test_explain_flattened.*?def test_explain_flattened_error""",
        "def test_explain_flattened_error",
        flags=re.MULTILINE | re.DOTALL,
    )

    # Copy the staged library into the repo, excluding files that are
    # hand-maintained here or regenerated by the template step below.
    s.move(
        library,
        excludes=[
            ".coveragerc",
            ".pre-commit-config.yaml",
            "setup.py",
            "README.rst",
            "docs/index.rst",
            "docs/summary_overview.md",
            f"docs/definition_{library.name}/services.rst",
            f"docs/instance_{library.name}/services.rst",
            f"docs/params_{library.name}/services.rst",
            f"docs/prediction_{library.name}/services.rst",
            f"scripts/fixup_aiplatform_{library.name}_keywords.py",
            f"scripts/fixup_definition_{library.name}_keywords.py",
            f"scripts/fixup_instance_{library.name}_keywords.py",
            f"scripts/fixup_params_{library.name}_keywords.py",
            f"scripts/fixup_prediction_{library.name}_keywords.py",
            "google/cloud/aiplatform/__init__.py",
            f"google/cloud/aiplatform/{library.name}/schema/**/services/",
            "**/gapic_version.py",
            ".kokoro/samples",
            "noxfile.py",
            "testing",
            "docs/conf.py",
        ],
    )
    has_generator_updates = True

# Delete the owl-bot-staging directories now that their contents are merged.
s.remove_staging_dirs()
|
|
| |
if has_generator_updates:
    # ----------------------------------------------------------------------
    # Add templated files
    # ----------------------------------------------------------------------
    templated_files = common.py_library(
        cov_level=98,
        system_test_python_versions=["3.8"],
        unit_test_python_versions=["3.8", "3.9", "3.10", "3.11", "3.12"],
        unit_test_extras=["testing"],
        system_test_extras=["testing"],
        microgenerator=True,
    )
    # Merge the templates, excluding files that are customized by hand in
    # this repository.
    s.move(
        templated_files,
        excludes=[
            ".coveragerc",
            ".pre-commit-config.yaml",
            ".kokoro/continuous/common.cfg",
            ".kokoro/presubmit/presubmit.cfg",
            ".kokoro/continuous/prerelease-deps.cfg",
            ".kokoro/presubmit/prerelease-deps.cfg",
            ".kokoro/docs/docs-presubmit.cfg",
            ".kokoro/build.sh",
            ".kokoro/release.sh",
            ".kokoro/release/common.cfg",
            ".kokoro/requirements*",
            ".kokoro/samples/python3.7/common.cfg",
            ".kokoro/samples/python3.8/common.cfg",
            ".kokoro/samples/python3.9/common.cfg",
            ".kokoro/samples/python3.10/common.cfg",
            ".kokoro/samples/python3.11/common.cfg",
            ".kokoro/samples/python3.12/common.cfg",
            ".kokoro/samples/python3.7/periodic.cfg",
            ".kokoro/samples/python3.8/periodic.cfg",
            ".kokoro/samples/python3.9/periodic.cfg",
            ".kokoro/samples/python3.10/periodic.cfg",
            ".kokoro/samples/python3.11/periodic.cfg",
            ".kokoro/samples/python3.12/periodic.cfg",
            ".github/CODEOWNERS",
            ".github/PULL_REQUEST_TEMPLATE.md",
            ".github/workflows",
            "README.rst",
            ".github/release-please.yml",
            "noxfile.py",
            "testing",
            "docs/conf.py",
        ],
    )

    # Generate README files for the samples directories.
    python.py_samples(skip_readmes=True)

    python.configure_previous_major_version_branches()

    # Point the sample tests at the Vertex AI sample-test project.
    # NOTE: the search pattern must be a raw string so that `\{` and `\}` are
    # real regex escapes; in a non-raw string they are invalid escape
    # sequences (SyntaxWarning, slated to become a SyntaxError).
    s.replace(
        ".kokoro/samples/python3.*/common.cfg",
        r"""env_vars: \{
    key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
    value: "python-docs-samples-tests-.*?"
\}""",
        """env_vars: {
    key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
    value: "ucaip-sample-tests"
}""",
    )

    # Run sample tests with the default python3 rather than a pinned 3.9.
    s.replace(
        ".kokoro/test-samples-impl.sh",
        "python3.9",
        "python3",
    )

    # Inject the Gemini (vertexai) docs build/upload steps ahead of the
    # standard "# build docs" step in the publish script.
    s.replace(
        ".kokoro/publish-docs.sh",
        "# build docs",
        """\
# build Gemini docs
nox -s gemini_docs
# create metadata
python3 -m docuploader create-metadata \\
--name="vertexai" \\
--version=$(python3 setup.py --version) \\
--language=$(jq --raw-output '.language // empty' .repo-metadata.json) \\
--distribution-name="google-cloud-vertexai" \\
--product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \\
--github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \\
--issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
cat docs.metadata
# upload docs
python3 -m docuploader upload gemini_docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}"
# Gemini docfx yaml files
nox -s gemini_docfx
# create metadata.
python3 -m docuploader create-metadata \\
--name="vertexai" \\
--version=$(python3 setup.py --version) \\
--language=$(jq --raw-output '.language // empty' .repo-metadata.json) \\
--distribution-name="google-cloud-vertexai" \\
--product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \\
--github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \\
--issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) \\
--stem="/vertex-ai/generative-ai/docs/reference/python"
cat docs.metadata
# upload docs
python3 -m docuploader upload gemini_docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}"
# build docs""",
    )

    # Normalize formatting of everything the steps above rewrote.
    s.shell.run(["nox", "-s", "blacken"], hide_output=False)
|
|