diff --git a/.coveragerc b/.coveragerc
index d9205f04..711d95bf 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -1,44 +1,20 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Generated by synthtool. DO NOT EDIT!
[run]
branch = True
[report]
fail_under = 100
show_missing = True
+omit =
+ google/cloud/translate/__init__.py
+ google/cloud/translate_v2/__init__.py
+ google/cloud/__init__.py
exclude_lines =
# Re-enable the standard pragma
pragma: NO COVER
# Ignore debug-only repr
def __repr__
- # Ignore abstract methods
- raise NotImplementedError
# Ignore pkg_resources exceptions.
# This is added at the module level as a safeguard for if someone
# generates the code and tries to run it without pip installing. This
# makes it virtually impossible to test properly.
except pkg_resources.DistributionNotFound
-omit =
- */gapic/*.py
- */proto/*.py
- */core/*.py
- */site-packages/*.py
- google/cloud/__init__.py
- google/cloud/translate.py
- google/cloud/translate/__init__.py
- google/cloud/translation*/*.py
\ No newline at end of file
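
The `exclude_lines` patterns above are regexes matched against source lines; any matching line (and, for a branch header, its block) is dropped from the coverage report. A minimal sketch of how a line opts out of the `fail_under = 100` requirement (the helper below is hypothetical, for illustration only):

    def _debug_repr(value):  # hypothetical helper, not part of this library
        if value is None:  # pragma: NO COVER
            # This branch is excluded from the coverage report.
            return "<none>"
        return repr(value)
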
diff --git a/.flake8 b/.flake8
index ed931638..29227d4c 100644
--- a/.flake8
+++ b/.flake8
@@ -26,6 +26,7 @@ exclude =
*_pb2.py
# Standard linting exemptions.
+ **/.nox/**
__pycache__,
.git,
*.pyc,
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index f8d4cbae..cb787cdf 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -7,6 +7,8 @@
# The @googleapis/yoshi-python is the default owner for changes in this repo
* @googleapis/yoshi-python
+# The @googleapis/cdpe-cloudai team owns the samples

+/samples/**/*.py @googleapis/cdpe-cloudai
# The python-samples-owners team is the default owner for samples
/samples/**/*.py @telpirion @sirtorry @googleapis/python-samples-owners
\ No newline at end of file
diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml
new file mode 100644
index 00000000..fc281c05
--- /dev/null
+++ b/.github/header-checker-lint.yml
@@ -0,0 +1,15 @@
+{"allowedCopyrightHolders": ["Google LLC"],
+ "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"],
+ "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt"],
+ "sourceFileExtensions": [
+ "ts",
+ "js",
+ "java",
+ "sh",
+ "Dockerfile",
+ "yaml",
+ "py",
+ "html",
+ "txt"
+ ]
+}
\ No newline at end of file
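
header-checker-lint flags source files with the listed extensions whose headers do not name an allowed copyright holder and license; requirements files are ignored. A sketch of a Python header that should satisfy this configuration (the exact matching rules live in the bot, so treat this as an assumption):

    # Copyright 2021 Google LLC
    #
    # Licensed under the Apache License, Version 2.0 (the "License");
    # you may not use this file except in compliance with the License.
    # You may obtain a copy of the License at
    #
    #     https://www.apache.org/licenses/LICENSE-2.0
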
diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml
new file mode 100644
index 00000000..af599353
--- /dev/null
+++ b/.github/sync-repo-settings.yaml
@@ -0,0 +1,13 @@
+# https://github.com/googleapis/repo-automation-bots/tree/master/packages/sync-repo-settings
+# Rules for master branch protection
+branchProtectionRules:
+# Identifies the protection rule pattern. Name of the branch to be protected.
+# Defaults to `master`
+- pattern: master
+ requiredStatusCheckContexts:
+ - 'Kokoro'
+ - 'cla/google'
+ - 'Samples - Lint'
+ - 'Samples - Python 3.6'
+ - 'Samples - Python 3.7'
+ - 'Samples - Python 3.8'
diff --git a/.gitignore b/.gitignore
index b9daa52f..b4243ced 100644
--- a/.gitignore
+++ b/.gitignore
@@ -50,8 +50,10 @@ docs.metadata
# Virtual environment
env/
+
+# Test logs
coverage.xml
-sponge_log.xml
+*sponge_log.xml
# System test environment variables.
system_tests/local_test_setup
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index e4e84c42..862a763d 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -15,7 +15,11 @@
set -eo pipefail
-cd github/python-translate
+if [[ -z "${PROJECT_ROOT:-}" ]]; then
+ PROJECT_ROOT="github/python-translate"
+fi
+
+cd "${PROJECT_ROOT}"
# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1
@@ -30,16 +34,26 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
# Remove old nox
-python3.6 -m pip uninstall --yes --quiet nox-automation
+python3 -m pip uninstall --yes --quiet nox-automation
# Install nox
-python3.6 -m pip install --upgrade --quiet nox
-python3.6 -m nox --version
+python3 -m pip install --upgrade --quiet nox
+python3 -m nox --version
+
+# If this is a continuous build, send the test log to the FlakyBot.
+# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then
+ cleanup() {
+ chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ }
+ trap cleanup EXIT HUP
+fi
# If NOX_SESSION is set, it only runs the specified session,
# otherwise run all the sessions.
if [[ -n "${NOX_SESSION:-}" ]]; then
- python3.6 -m nox -s "${NOX_SESSION:-}"
+ python3 -m nox -s ${NOX_SESSION:-}
else
- python3.6 -m nox
+ python3 -m nox
fi
diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg
index 11181078..a7a40a92 100644
--- a/.kokoro/docs/docs-presubmit.cfg
+++ b/.kokoro/docs/docs-presubmit.cfg
@@ -15,3 +15,14 @@ env_vars: {
key: "TRAMPOLINE_IMAGE_UPLOAD"
value: "false"
}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-translate/.kokoro/build.sh"
+}
+
+# Only run this nox session.
+env_vars: {
+ key: "NOX_SESSION"
+ value: "docs docfx"
+}
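
Setting NOX_SESSION to "docs docfx" makes build.sh run only those two nox sessions for docs presubmits. A minimal sketch of what the `docs` session in noxfile.py looks like (abridged; the pinned tools and options here are assumptions, see the repo's noxfile.py for the real definition):

    import nox

    @nox.session(python="3.8")
    def docs(session):
        """Build the Sphinx docs; warnings are treated as errors."""
        session.install("sphinx", "alabaster", "recommonmark")
        session.install("-e", ".")
        session.run(
            "sphinx-build",
            "-W",              # fail on warnings
            "-b", "html",
            "docs/",
            "docs/_build/html/",
        )
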
diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.6/periodic-head.cfg
new file mode 100644
index 00000000..f9cfcd33
--- /dev/null
+++ b/.kokoro/samples/python3.6/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-translate/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg
new file mode 100644
index 00000000..f9cfcd33
--- /dev/null
+++ b/.kokoro/samples/python3.7/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-translate/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg
new file mode 100644
index 00000000..f9cfcd33
--- /dev/null
+++ b/.kokoro/samples/python3.8/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-translate/.kokoro/test-samples-against-head.sh"
+}
diff --git a/google/cloud/translate.py b/.kokoro/test-samples-against-head.sh
old mode 100644
new mode 100755
similarity index 57%
rename from google/cloud/translate.py
rename to .kokoro/test-samples-against-head.sh
index 7b5ea626..e1137ce0
--- a/google/cloud/translate.py
+++ b/.kokoro/test-samples-against-head.sh
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+#!/bin/bash
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,14 +13,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+# A customized test runner for samples.
+#
+# For periodic builds, you can specify this file for testing against head.
-from __future__ import absolute_import
-
-from google.cloud.translate_v3 import TranslationServiceClient
-from google.cloud.translate_v3 import types
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to the rightmost command to exit with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+cd github/python-translate
-__all__ = (
- "types",
- "TranslationServiceClient",
-)
+exec .kokoro/test-samples-impl.sh
diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh
new file mode 100755
index 00000000..cf5de74c
--- /dev/null
+++ b/.kokoro/test-samples-impl.sh
@@ -0,0 +1,102 @@
+#!/bin/bash
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to the rightmost command to exit with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+# Exit early if samples directory doesn't exist
+if [ ! -d "./samples" ]; then
+ echo "No tests run. './samples' not found"
+ exit 0
+fi
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Install nox
+python3.6 -m pip install --upgrade --quiet nox
+
+# Use secrets accessor service account to get secrets
+if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
+ gcloud auth activate-service-account \
+ --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
+ --project="cloud-devrel-kokoro-resources"
+fi
+
+# This script will create 3 files:
+# - testing/test-env.sh
+# - testing/service-account.json
+# - testing/client-secrets.json
+./scripts/decrypt-secrets.sh
+
+source ./testing/test-env.sh
+export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
+
+# For cloud-run session, we activate the service account for gcloud sdk.
+gcloud auth activate-service-account \
+ --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
+
+export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
+
+echo -e "\n******************** TESTING PROJECTS ********************"
+
+# Switch to 'fail at end' to allow all tests to complete before exiting.
+set +e
+# Use RTN to return a non-zero value if the test fails.
+RTN=0
+ROOT=$(pwd)
+# Find all requirements.txt in the samples directory (may break on whitespace).
+for file in samples/**/requirements.txt; do
+ cd "$ROOT"
+ # Navigate to the project folder.
+ file=$(dirname "$file")
+ cd "$file"
+
+ echo "------------------------------------------------------------"
+ echo "- testing $file"
+ echo "------------------------------------------------------------"
+
+ # Use nox to execute the tests for the project.
+ python3.6 -m nox -s "$RUN_TESTS_SESSION"
+ EXIT=$?
+
+ # If this is a periodic build, send the test log to the FlakyBot.
+ # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
+ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+ chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ fi
+
+ if [[ $EXIT -ne 0 ]]; then
+ RTN=1
+ echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
+ else
+ echo -e "\n Testing completed.\n"
+ fi
+
+done
+cd "$ROOT"
+
+# Workaround for Kokoro permissions issue: delete secrets
+rm testing/{test-env.sh,client-secrets.json,service-account.json}
+
+exit "$RTN"
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index 6409c76b..71a494e3 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -13,6 +13,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+# The default test runner for samples.
+#
+# For periodic builds, we rewind the repo to the latest release, and
+# run test-samples-impl.sh.
# `-e` enables the script to automatically fail when a command fails
# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero
@@ -24,87 +28,19 @@ cd github/python-translate
# Run periodic samples tests at latest release
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+ # Preserve the test runner implementation.
+ cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh"
+ echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
+ echo "Now we rewind the repo back to the latest release..."
LATEST_RELEASE=$(git describe --abbrev=0 --tags)
git checkout $LATEST_RELEASE
-fi
-
-# Exit early if samples directory doesn't exist
-if [ ! -d "./samples" ]; then
- echo "No tests run. `./samples` not found"
- exit 0
-fi
-
-# Disable buffering, so that the logs stream through.
-export PYTHONUNBUFFERED=1
-
-# Debug: show build environment
-env | grep KOKORO
-
-# Install nox
-python3.6 -m pip install --upgrade --quiet nox
-
-# Use secrets acessor service account to get secrets
-if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
- gcloud auth activate-service-account \
- --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
- --project="cloud-devrel-kokoro-resources"
-fi
-
-# This script will create 3 files:
-# - testing/test-env.sh
-# - testing/service-account.json
-# - testing/client-secrets.json
-./scripts/decrypt-secrets.sh
-
-source ./testing/test-env.sh
-export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
-
-# For cloud-run session, we activate the service account for gcloud sdk.
-gcloud auth activate-service-account \
- --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
-
-export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
-
-echo -e "\n******************** TESTING PROJECTS ********************"
-
-# Switch to 'fail at end' to allow all tests to complete before exiting.
-set +e
-# Use RTN to return a non-zero value if the test fails.
-RTN=0
-ROOT=$(pwd)
-# Find all requirements.txt in the samples directory (may break on whitespace).
-for file in samples/**/requirements.txt; do
- cd "$ROOT"
- # Navigate to the project folder.
- file=$(dirname "$file")
- cd "$file"
-
- echo "------------------------------------------------------------"
- echo "- testing $file"
- echo "------------------------------------------------------------"
-
- # Use nox to execute the tests for the project.
- python3.6 -m nox -s "$RUN_TESTS_SESSION"
- EXIT=$?
-
- # If this is a periodic build, send the test log to the Build Cop Bot.
- # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop.
- if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
- chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop
- $KOKORO_GFILE_DIR/linux_amd64/buildcop
+ echo "The current head is: "
+ echo $(git rev-parse --verify HEAD)
+ echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
+ # Restore the test runner implementation if the checkout removed it.
+ if [ ! -f .kokoro/test-samples-impl.sh ]; then
+ cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh
fi
+fi
- if [[ $EXIT -ne 0 ]]; then
- RTN=1
- echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
- else
- echo -e "\n Testing completed.\n"
- fi
-
-done
-cd "$ROOT"
-
-# Workaround for Kokoro permissions issue: delete secrets
-rm testing/{test-env.sh,client-secrets.json,service-account.json}
-
-exit "$RTN"
+exec .kokoro/test-samples-impl.sh
diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh
index 719bcd5b..4af6cdc2 100755
--- a/.kokoro/trampoline_v2.sh
+++ b/.kokoro/trampoline_v2.sh
@@ -159,7 +159,7 @@ if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then
"KOKORO_GITHUB_COMMIT"
"KOKORO_GITHUB_PULL_REQUEST_NUMBER"
"KOKORO_GITHUB_PULL_REQUEST_COMMIT"
- # For Build Cop Bot
+ # For FlakyBot
"KOKORO_GITHUB_COMMIT_URL"
"KOKORO_GITHUB_PULL_REQUEST_URL"
)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 00000000..32302e48
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,17 @@
+# See https://pre-commit.com for more information
+# See https://pre-commit.com/hooks.html for more hooks
+repos:
+- repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v3.4.0
+ hooks:
+ - id: trailing-whitespace
+ - id: end-of-file-fixer
+ - id: check-yaml
+- repo: https://github.com/psf/black
+ rev: 19.10b0
+ hooks:
+ - id: black
+- repo: https://gitlab.com/pycqa/flake8
+ rev: 3.9.0
+ hooks:
+ - id: flake8
diff --git a/.trampolinerc b/.trampolinerc
index 995ee291..383b6ec8 100644
--- a/.trampolinerc
+++ b/.trampolinerc
@@ -24,6 +24,7 @@ required_envvars+=(
pass_down_envvars+=(
"STAGING_BUCKET"
"V2_STAGING_BUCKET"
+ "NOX_SESSION"
)
# Prevent unintentional override on the default image.
diff --git a/CHANGELOG.md b/CHANGELOG.md
index e0644800..06748f10 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,21 @@
[1]: https://pypi.org/project/google-cloud-translate/#history
+## [3.1.0](https://www.github.com/googleapis/python-translate/compare/v3.0.2...v3.1.0) (2021-03-31)
+
+
+### Features
+
+* add `from_service_account_info` ([a397eff](https://www.github.com/googleapis/python-translate/commit/a397effb87f74f579605bcf261bf2b00d5e9fa5b))
+* **v3beta1:** add online and batch document translation ([a397eff](https://www.github.com/googleapis/python-translate/commit/a397effb87f74f579605bcf261bf2b00d5e9fa5b))
+* **v3beta1:** add online and batch document translation ([#121](https://www.github.com/googleapis/python-translate/issues/121)) ([a397eff](https://www.github.com/googleapis/python-translate/commit/a397effb87f74f579605bcf261bf2b00d5e9fa5b))
+
+
+### Bug Fixes
+
+* moves region tags ([#103](https://www.github.com/googleapis/python-translate/issues/103)) ([e161eb5](https://www.github.com/googleapis/python-translate/commit/e161eb5e9cdc3124aa7efe2d535bae67812ae93c))
+* use correct retry deadlines ([a397eff](https://www.github.com/googleapis/python-translate/commit/a397effb87f74f579605bcf261bf2b00d5e9fa5b))
+
### [3.0.2](https://www.github.com/googleapis/python-translate/compare/v3.0.1...v3.0.2) (2020-12-09)
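
The `from_service_account_info` feature noted above accepts an already-parsed service account dict instead of a file path. A short usage sketch (the credentials file path is a placeholder):

    import json
    from google.cloud import translate_v3

    # Parse the service account key ourselves (placeholder path),
    # then build a client from the resulting dict.
    with open("service-account.json") as fp:
        info = json.load(fp)

    client = translate_v3.TranslationServiceClient.from_service_account_info(info)
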
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 0d2e4022..86915e5b 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -21,8 +21,8 @@ In order to add a feature:
- The feature must be documented in both the API and narrative
documentation.
-- The feature must work fully on the following CPython versions: 2.7,
- 3.5, 3.6, 3.7 and 3.8 on both UNIX and Windows.
+- The feature must work fully on the following CPython versions:
+ 3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows.
- The feature must not add unnecessary dependencies (where
"unnecessary" is of course subjective, but new dependencies should
@@ -70,9 +70,14 @@ We use `nox `__ to instrument our tests.
- To test your changes, run unit tests with ``nox``::
$ nox -s unit-2.7
- $ nox -s unit-3.7
+ $ nox -s unit-3.8
$ ...
+- Args to pytest can be passed through the nox command separated by a `--`. For
+ example, to run a single test::
+
+ $ nox -s unit-3.8 -- -k <name of test>
+
.. note::
The unit tests and system tests are described in the
@@ -80,25 +85,6 @@ We use `nox `__ to instrument our tests.
.. nox: https://pypi.org/project/nox/
-Note on Editable Installs / Develop Mode
-========================================
-
-- As mentioned previously, using ``setuptools`` in `develop mode`_
- or a ``pip`` `editable install`_ is not possible with this
- library. This is because this library uses `namespace packages`_.
- For context see `Issue #2316`_ and the relevant `PyPA issue`_.
-
- Since ``editable`` / ``develop`` mode can't be used, packages
- need to be installed directly. Hence your changes to the source
- tree don't get incorporated into the **already installed**
- package.
-
-.. _namespace packages: https://www.python.org/dev/peps/pep-0420/
-.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316
-.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12
-.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode
-.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs
-
*****************************************
I'm getting weird errors... Can you help?
*****************************************
@@ -112,8 +98,12 @@ On Debian/Ubuntu::
************
Coding Style
************
+- We use the automatic code formatter ``black``. You can run it using
+ the nox session ``blacken``. This will eliminate many lint errors. Run via::
-- PEP8 compliance, with exceptions defined in the linter configuration.
+ $ nox -s blacken
+
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
If you have ``nox`` installed, you can test that you have not introduced
any non-compliant code via::
@@ -130,6 +120,16 @@ Coding Style
should point to the official ``googleapis`` checkout and the
the branch should be the main branch on that remote (``master``).
+- This repository contains configuration for the
+ `pre-commit <https://pre-commit.com/>`__ tool, which automates checking
+ our linters during a commit. If you have it installed on your ``$PATH``,
+ you can enable enforcing those checks via:
+
+.. code-block:: bash
+
+ $ pre-commit install
+ pre-commit installed at .git/hooks/pre-commit
+
Exceptions to PEP8:
- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for
@@ -142,13 +142,18 @@ Running System Tests
- To run system tests, you can execute::
- $ nox -s system-3.7
+ # Run all system tests
+ $ nox -s system-3.8
$ nox -s system-2.7
+ # Run a single system test
+ $ nox -s system-3.8 -- -k <name of test>
+
+
.. note::
System tests are only configured to run under Python 2.7 and
- Python 3.7. For expediency, we do not run them in older versions
+ Python 3.8. For expediency, we do not run them in older versions
of Python 3.
This alone will not run the tests. You'll need to change some local
@@ -211,25 +216,24 @@ Supported Python Versions
We support:
-- `Python 3.5`_
- `Python 3.6`_
- `Python 3.7`_
- `Python 3.8`_
+- `Python 3.9`_
-.. _Python 3.5: https://docs.python.org/3.5/
.. _Python 3.6: https://docs.python.org/3.6/
.. _Python 3.7: https://docs.python.org/3.7/
.. _Python 3.8: https://docs.python.org/3.8/
+.. _Python 3.9: https://docs.python.org/3.9/
Supported versions can be found in our ``noxfile.py`` `config`_.
.. _config: https://github.com/googleapis/python-translate/blob/master/noxfile.py
-Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020.
We also explicitly decided to support Python 3 beginning with version
-3.5. Reasons for this include:
+3.6. Reasons for this include:
- Encouraging use of newest versions of Python 3
- Taking the lead of `prominent`_ open-source `projects`_
diff --git a/LICENSE b/LICENSE
index a8ee855d..d6456956 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,7 @@
- Apache License
+
+ Apache License
Version 2.0, January 2004
- https://www.apache.org/licenses/
+ http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
@@ -192,7 +193,7 @@
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
- https://www.apache.org/licenses/LICENSE-2.0
+ http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
diff --git a/MANIFEST.in b/MANIFEST.in
index e9e29d12..e783f4c6 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -16,10 +16,10 @@
# Generated by synthtool. DO NOT EDIT!
include README.rst LICENSE
-recursive-include google *.json *.proto
+recursive-include google *.json *.proto py.typed
recursive-include tests *
global-exclude *.py[co]
global-exclude __pycache__
# Exclude scripts for samples readmegen
-prune scripts/readme-gen
\ No newline at end of file
+prune scripts/readme-gen
diff --git a/UPGRADING.md b/UPGRADING.md
index 7c564789..48fd3f7c 100644
--- a/UPGRADING.md
+++ b/UPGRADING.md
@@ -17,10 +17,10 @@ The 3.0.0 release requires Python 3.6+.
Methods expect request objects. We provide a script that will convert most common use cases.
-* Install the library
+* Install the library and `libcst`.
```py
-python3 -m pip install google-cloud-translate
+python3 -m pip install google-cloud-translate libcst
```
* The script `fixup_translation_{version}_keywords.py` is shipped with the library. It expects
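
For context on the request-object style that the fixup script migrates callers to, a short sketch of a 3.x call (project and language values are placeholders):

    from google.cloud import translate_v3 as translate

    client = translate.TranslationServiceClient()

    # 3.x methods take a single request object (here as a dict).
    response = client.translate_text(
        request={
            "parent": "projects/my-project/locations/global",
            "contents": ["Hello, world!"],
            "target_language_code": "fr",
        }
    )
    print(response.translations[0].translated_text)
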
diff --git a/docs/_static/custom.css b/docs/_static/custom.css
index 0abaf229..bcd37bbd 100644
--- a/docs/_static/custom.css
+++ b/docs/_static/custom.css
@@ -1,4 +1,9 @@
div#python2-eol {
border-color: red;
border-width: medium;
-}
\ No newline at end of file
+}
+
+/* Ensure minimum width for 'Parameters' / 'Returns' column */
+dl.field-list > dt {
+ min-width: 100px
+}
diff --git a/docs/conf.py b/docs/conf.py
index 51457137..6fac3349 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -345,10 +345,11 @@
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
- "python": ("http://python.readthedocs.org/en/latest/", None),
- "google-auth": ("https://google-auth.readthedocs.io/en/stable", None),
+ "python": ("https://python.readthedocs.org/en/latest/", None),
+ "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None),
"google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
- "grpc": ("https://grpc.io/grpc/python/", None),
+ "grpc": ("https://grpc.github.io/grpc/python/", None),
+ "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
}
diff --git a/docs/translate_v3/services.rst b/docs/translate_v3/services.rst
index ce46a05d..f457bf2c 100644
--- a/docs/translate_v3/services.rst
+++ b/docs/translate_v3/services.rst
@@ -1,6 +1,6 @@
Services for Google Cloud Translate v3 API
==========================================
+.. toctree::
+ :maxdepth: 2
-.. automodule:: google.cloud.translate_v3.services.translation_service
- :members:
- :inherited-members:
+ translation_service
diff --git a/docs/translate_v3/translation_service.rst b/docs/translate_v3/translation_service.rst
new file mode 100644
index 00000000..0a3597fd
--- /dev/null
+++ b/docs/translate_v3/translation_service.rst
@@ -0,0 +1,11 @@
+TranslationService
+------------------------------------
+
+.. automodule:: google.cloud.translate_v3.services.translation_service
+ :members:
+ :inherited-members:
+
+
+.. automodule:: google.cloud.translate_v3.services.translation_service.pagers
+ :members:
+ :inherited-members:
diff --git a/docs/translate_v3/types.rst b/docs/translate_v3/types.rst
index f84d4f23..35bd3cbf 100644
--- a/docs/translate_v3/types.rst
+++ b/docs/translate_v3/types.rst
@@ -3,3 +3,5 @@ Types for Google Cloud Translate v3 API
.. automodule:: google.cloud.translate_v3.types
:members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/docs/translate_v3beta1/services.rst b/docs/translate_v3beta1/services.rst
index 4fc95083..2e364924 100644
--- a/docs/translate_v3beta1/services.rst
+++ b/docs/translate_v3beta1/services.rst
@@ -1,6 +1,6 @@
Services for Google Cloud Translate v3beta1 API
===============================================
+.. toctree::
+ :maxdepth: 2
-.. automodule:: google.cloud.translate_v3beta1.services.translation_service
- :members:
- :inherited-members:
+ translation_service
diff --git a/docs/translate_v3beta1/translation_service.rst b/docs/translate_v3beta1/translation_service.rst
new file mode 100644
index 00000000..cfe3524b
--- /dev/null
+++ b/docs/translate_v3beta1/translation_service.rst
@@ -0,0 +1,11 @@
+TranslationService
+------------------------------------
+
+.. automodule:: google.cloud.translate_v3beta1.services.translation_service
+ :members:
+ :inherited-members:
+
+
+.. automodule:: google.cloud.translate_v3beta1.services.translation_service.pagers
+ :members:
+ :inherited-members:
diff --git a/docs/translate_v3beta1/types.rst b/docs/translate_v3beta1/types.rst
index 32f1a720..f5d6b49a 100644
--- a/docs/translate_v3beta1/types.rst
+++ b/docs/translate_v3beta1/types.rst
@@ -3,3 +3,5 @@ Types for Google Cloud Translate v3beta1 API
.. automodule:: google.cloud.translate_v3beta1.types
:members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/google/cloud/translate_v3/services/translation_service/async_client.py b/google/cloud/translate_v3/services/translation_service/async_client.py
index 8d2c52b9..d293e203 100644
--- a/google/cloud/translate_v3/services/translation_service/async_client.py
+++ b/google/cloud/translate_v3/services/translation_service/async_client.py
@@ -50,9 +50,76 @@ class TranslationServiceAsyncClient:
glossary_path = staticmethod(TranslationServiceClient.glossary_path)
parse_glossary_path = staticmethod(TranslationServiceClient.parse_glossary_path)
- from_service_account_file = TranslationServiceClient.from_service_account_file
+ common_billing_account_path = staticmethod(
+ TranslationServiceClient.common_billing_account_path
+ )
+ parse_common_billing_account_path = staticmethod(
+ TranslationServiceClient.parse_common_billing_account_path
+ )
+
+ common_folder_path = staticmethod(TranslationServiceClient.common_folder_path)
+ parse_common_folder_path = staticmethod(
+ TranslationServiceClient.parse_common_folder_path
+ )
+
+ common_organization_path = staticmethod(
+ TranslationServiceClient.common_organization_path
+ )
+ parse_common_organization_path = staticmethod(
+ TranslationServiceClient.parse_common_organization_path
+ )
+
+ common_project_path = staticmethod(TranslationServiceClient.common_project_path)
+ parse_common_project_path = staticmethod(
+ TranslationServiceClient.parse_common_project_path
+ )
+
+ common_location_path = staticmethod(TranslationServiceClient.common_location_path)
+ parse_common_location_path = staticmethod(
+ TranslationServiceClient.parse_common_location_path
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ TranslationServiceAsyncClient: The constructed client.
+ """
+ return TranslationServiceClient.from_service_account_info.__func__(TranslationServiceAsyncClient, info, *args, **kwargs) # type: ignore
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ TranslationServiceAsyncClient: The constructed client.
+ """
+ return TranslationServiceClient.from_service_account_file.__func__(TranslationServiceAsyncClient, filename, *args, **kwargs) # type: ignore
+
from_service_account_json = from_service_account_file
+ @property
+ def transport(self) -> TranslationServiceTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ TranslationServiceTransport: The transport used by the client instance.
+ """
+ return self._client.transport
+
get_transport_class = functools.partial(
type(TranslationServiceClient).get_transport_class,
type(TranslationServiceClient),
@@ -123,7 +190,7 @@ async def translate_text(
r"""Translates input text and returns translated text.
Args:
- request (:class:`~.translation_service.TranslateTextRequest`):
+ request (:class:`google.cloud.translate_v3.types.TranslateTextRequest`):
The request object. The request message for synchronous
translation.
parent (:class:`str`):
@@ -143,6 +210,7 @@ async def translate_text(
Models and glossaries must be within the same region
(have same location-id), otherwise an INVALID_ARGUMENT
(400) error is returned.
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -151,6 +219,7 @@ async def translate_text(
use for translation of the input text,
set to one of the language codes listed
in Language Support.
+
This corresponds to the ``target_language_code`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -159,6 +228,7 @@ async def translate_text(
string format. We recommend the total
content be less than 30k codepoints. Use
BatchTranslateText for larger text.
+
This corresponds to the ``contents`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -181,6 +251,7 @@ async def translate_text(
If missing, the system decides which google base model
to use.
+
This corresponds to the ``model`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -189,6 +260,7 @@ async def translate_text(
text, for example, "text/html",
"text/plain". If left blank, the MIME
type defaults to "text/html".
+
This corresponds to the ``mime_type`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -201,6 +273,7 @@ async def translate_text(
API attempts to identify the source
language automatically and returns the
source language within the response.
+
This corresponds to the ``source_language_code`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -212,13 +285,13 @@ async def translate_text(
sent along with the request as metadata.
Returns:
- ~.translation_service.TranslateTextResponse:
+ google.cloud.translate_v3.types.TranslateTextResponse:
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any(
+ has_flattened_params = any(
[
parent,
target_language_code,
@@ -227,7 +300,8 @@ async def translate_text(
mime_type,
source_language_code,
]
- ):
+ )
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -242,8 +316,6 @@ async def translate_text(
request.parent = parent
if target_language_code is not None:
request.target_language_code = target_language_code
- if contents is not None:
- request.contents = contents
if model is not None:
request.model = model
if mime_type is not None:
@@ -251,6 +323,9 @@ async def translate_text(
if source_language_code is not None:
request.source_language_code = source_language_code
+ if contents:
+ request.contents.extend(contents)
+
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
@@ -286,7 +361,7 @@ async def detect_language(
r"""Detects the language of text within a request.
Args:
- request (:class:`~.translation_service.DetectLanguageRequest`):
+ request (:class:`google.cloud.translate_v3.types.DetectLanguageRequest`):
The request object. The request message for language
detection.
parent (:class:`str`):
@@ -304,6 +379,7 @@ async def detect_language(
Only models within the same region (has same
location-id) can be used. Otherwise an INVALID_ARGUMENT
(400) error is returned.
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -318,6 +394,7 @@ async def detect_language(
``projects/{project-number-or-id}/locations/{location-id}/models/language-detection/default``.
If not specified, the default model is used.
+
This corresponds to the ``model`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -326,12 +403,14 @@ async def detect_language(
text, for example, "text/html",
"text/plain". If left blank, the MIME
type defaults to "text/html".
+
This corresponds to the ``mime_type`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
content (:class:`str`):
The content of the input stored as a
string.
+
This corresponds to the ``content`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -343,7 +422,7 @@ async def detect_language(
sent along with the request as metadata.
Returns:
- ~.translation_service.DetectLanguageResponse:
+ google.cloud.translate_v3.types.DetectLanguageResponse:
The response message for language
detection.
@@ -351,7 +430,8 @@ async def detect_language(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent, model, mime_type, content]):
+ has_flattened_params = any([parent, model, mime_type, content])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -406,7 +486,7 @@ async def get_supported_languages(
translation.
Args:
- request (:class:`~.translation_service.GetSupportedLanguagesRequest`):
+ request (:class:`google.cloud.translate_v3.types.GetSupportedLanguagesRequest`):
The request object. The request message for discovering
supported languages.
parent (:class:`str`):
@@ -425,6 +505,7 @@ async def get_supported_languages(
Only models within the same region (have same
location-id) can be used, otherwise an INVALID_ARGUMENT
(400) error is returned.
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -443,6 +524,7 @@ async def get_supported_languages(
Returns languages supported by the specified model. If
missing, we get supported languages of Google general
base (PBMT) model.
+
This corresponds to the ``model`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -452,6 +534,7 @@ async def get_supported_languages(
of supported languages. If missing, then
display names are not returned in a
response.
+
This corresponds to the ``display_language_code`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -463,7 +546,7 @@ async def get_supported_languages(
sent along with the request as metadata.
Returns:
- ~.translation_service.SupportedLanguages:
+ google.cloud.translate_v3.types.SupportedLanguages:
The response message for discovering
supported languages.
@@ -471,7 +554,8 @@ async def get_supported_languages(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent, model, display_language_code]):
+ has_flattened_params = any([parent, model, display_language_code])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -500,6 +584,7 @@ async def get_supported_languages(
predicate=retries.if_exception_type(
exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -536,7 +621,7 @@ async def batch_translate_text(
of the call.
Args:
- request (:class:`~.translation_service.BatchTranslateTextRequest`):
+ request (:class:`google.cloud.translate_v3.types.BatchTranslateTextRequest`):
The request object. The batch translation request.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
@@ -546,15 +631,13 @@ async def batch_translate_text(
sent along with the request as metadata.
Returns:
- ~.operation_async.AsyncOperation:
+ google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
- The result type for the operation will be
- :class:``~.translation_service.BatchTranslateResponse``:
- Stored in the
- [google.longrunning.Operation.response][google.longrunning.Operation.response]
- field returned by BatchTranslateText if at least one
- sentence is translated successfully.
+ The result type for the operation will be :class:`google.cloud.translate_v3.types.BatchTranslateResponse` Stored in the
+ [google.longrunning.Operation.response][google.longrunning.Operation.response]
+ field returned by BatchTranslateText if at least one
+ sentence is translated successfully.
"""
# Create or coerce a protobuf request object.
@@ -603,14 +686,14 @@ async def create_glossary(
Returns NOT_FOUND, if the project doesn't exist.
Args:
- request (:class:`~.translation_service.CreateGlossaryRequest`):
+ request (:class:`google.cloud.translate_v3.types.CreateGlossaryRequest`):
The request object. Request message for CreateGlossary.
parent (:class:`str`):
Required. The project name.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- glossary (:class:`~.translation_service.Glossary`):
+ glossary (:class:`google.cloud.translate_v3.types.Glossary`):
Required. The glossary to create.
This corresponds to the ``glossary`` field
on the ``request`` instance; if ``request`` is provided, this
@@ -623,18 +706,19 @@ async def create_glossary(
sent along with the request as metadata.
Returns:
- ~.operation_async.AsyncOperation:
+ google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.translation_service.Glossary``: Represents a
- glossary built from user provided data.
+ :class:`google.cloud.translate_v3.types.Glossary`
+ Represents a glossary built from user provided data.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent, glossary]):
+ has_flattened_params = any([parent, glossary])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -691,12 +775,13 @@ async def list_glossaries(
doesn't exist.
Args:
- request (:class:`~.translation_service.ListGlossariesRequest`):
+ request (:class:`google.cloud.translate_v3.types.ListGlossariesRequest`):
The request object. Request message for ListGlossaries.
parent (:class:`str`):
Required. The name of the project
from which to list all of the
glossaries.
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -708,7 +793,7 @@ async def list_glossaries(
sent along with the request as metadata.
Returns:
- ~.pagers.ListGlossariesAsyncPager:
+ google.cloud.translate_v3.services.translation_service.pagers.ListGlossariesAsyncPager:
Response message for ListGlossaries.
Iterating over this object will yield
results and resolve additional pages
@@ -718,7 +803,8 @@ async def list_glossaries(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent]):
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -743,6 +829,7 @@ async def list_glossaries(
predicate=retries.if_exception_type(
exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -779,11 +866,12 @@ async def get_glossary(
exist.
Args:
- request (:class:`~.translation_service.GetGlossaryRequest`):
+ request (:class:`google.cloud.translate_v3.types.GetGlossaryRequest`):
The request object. Request message for GetGlossary.
name (:class:`str`):
Required. The name of the glossary to
retrieve.
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -795,7 +883,7 @@ async def get_glossary(
sent along with the request as metadata.
Returns:
- ~.translation_service.Glossary:
+ google.cloud.translate_v3.types.Glossary:
Represents a glossary built from user
provided data.
@@ -803,7 +891,8 @@ async def get_glossary(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -828,6 +917,7 @@ async def get_glossary(
predicate=retries.if_exception_type(
exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -859,11 +949,12 @@ async def delete_glossary(
doesn't exist.
Args:
- request (:class:`~.translation_service.DeleteGlossaryRequest`):
+ request (:class:`google.cloud.translate_v3.types.DeleteGlossaryRequest`):
The request object. Request message for DeleteGlossary.
name (:class:`str`):
Required. The name of the glossary to
delete.
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -875,20 +966,19 @@ async def delete_glossary(
sent along with the request as metadata.
Returns:
- ~.operation_async.AsyncOperation:
+ google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
- The result type for the operation will be
- :class:``~.translation_service.DeleteGlossaryResponse``:
- Stored in the
- [google.longrunning.Operation.response][google.longrunning.Operation.response]
- field returned by DeleteGlossary.
+ The result type for the operation will be :class:`google.cloud.translate_v3.types.DeleteGlossaryResponse` Stored in the
+ [google.longrunning.Operation.response][google.longrunning.Operation.response]
+ field returned by DeleteGlossary.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -913,6 +1003,7 @@ async def delete_glossary(
predicate=retries.if_exception_type(
exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=DEFAULT_CLIENT_INFO,
diff --git a/google/cloud/translate_v3/services/translation_service/client.py b/google/cloud/translate_v3/services/translation_service/client.py
index af612268..c4637e75 100644
--- a/google/cloud/translate_v3/services/translation_service/client.py
+++ b/google/cloud/translate_v3/services/translation_service/client.py
@@ -115,6 +115,22 @@ def _get_default_mtls_endpoint(api_endpoint):
DEFAULT_ENDPOINT
)
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ TranslationServiceClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
@@ -127,7 +143,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
kwargs: Additional arguments to pass to the constructor.
Returns:
- {@api.name}: The constructed client.
+ TranslationServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
@@ -135,6 +151,15 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
from_service_account_json = from_service_account_file
+ @property
+ def transport(self) -> TranslationServiceTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ TranslationServiceTransport: The transport used by the client instance.
+ """
+ return self._transport
+
@staticmethod
def glossary_path(project: str, location: str, glossary: str,) -> str:
"""Return a fully-qualified glossary string."""
@@ -151,6 +176,65 @@ def parse_glossary_path(path: str) -> Dict[str, str]:
)
return m.groupdict() if m else {}
+ @staticmethod
+ def common_billing_account_path(billing_account: str,) -> str:
+ """Return a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(folder: str,) -> str:
+ """Return a fully-qualified folder string."""
+ return "folders/{folder}".format(folder=folder,)
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P<folder>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(organization: str,) -> str:
+ """Return a fully-qualified organization string."""
+ return "organizations/{organization}".format(organization=organization,)
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse an organization path into its component segments."""
+ m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(project: str,) -> str:
+ """Return a fully-qualified project string."""
+ return "projects/{project}".format(project=project,)
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(project: str, location: str,) -> str:
+ """Return a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+ return m.groupdict() if m else {}
+
def __init__(
self,
*,
@@ -167,10 +251,10 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
- transport (Union[str, ~.TranslationServiceTransport]): The
+ transport (Union[str, TranslationServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
- client_options (client_options_lib.ClientOptions): Custom options for the
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
@@ -186,10 +270,10 @@ def __init__(
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
your own client library.
Raises:
@@ -206,21 +290,17 @@ def __init__(
util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
)
- ssl_credentials = None
+ client_cert_source_func = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
- import grpc # type: ignore
-
- cert, key = client_options.client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
is_mtls = True
+ client_cert_source_func = client_options.client_cert_source
else:
- creds = SslCredentials()
- is_mtls = creds.is_mtls
- ssl_credentials = creds.ssl_credentials if is_mtls else None
+ is_mtls = mtls.has_default_client_cert_source()
+ client_cert_source_func = (
+ mtls.default_client_cert_source() if is_mtls else None
+ )
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
@@ -263,7 +343,7 @@ def __init__(
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
- ssl_channel_credentials=ssl_credentials,
+ client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
)
@@ -285,10 +365,10 @@ def translate_text(
r"""Translates input text and returns translated text.
Args:
- request (:class:`~.translation_service.TranslateTextRequest`):
+ request (google.cloud.translate_v3.types.TranslateTextRequest):
The request object. The request message for synchronous
translation.
- parent (:class:`str`):
+ parent (str):
Required. Project or location to make a call. Must refer
to a caller's project.
@@ -305,26 +385,29 @@ def translate_text(
Models and glossaries must be within the same region
(have same location-id), otherwise an INVALID_ARGUMENT
(400) error is returned.
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- target_language_code (:class:`str`):
+ target_language_code (str):
Required. The BCP-47 language code to
use for translation of the input text,
set to one of the language codes listed
in Language Support.
+
This corresponds to the ``target_language_code`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- contents (:class:`Sequence[str]`):
+ contents (Sequence[str]):
Required. The content of the input in
string format. We recommend the total
content be less than 30k codepoints. Use
BatchTranslateText for larger text.
+
This corresponds to the ``contents`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- model (:class:`str`):
+ model (str):
Optional. The ``model`` type requested for this
translation.
@@ -343,18 +426,20 @@ def translate_text(
If missing, the system decides which google base model
to use.
+
This corresponds to the ``model`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- mime_type (:class:`str`):
+ mime_type (str):
Optional. The format of the source
text, for example, "text/html",
"text/plain". If left blank, the MIME
type defaults to "text/html".
+
This corresponds to the ``mime_type`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- source_language_code (:class:`str`):
+ source_language_code (str):
Optional. The BCP-47 language code of
the input text if known, for example,
"en-US" or "sr-Latn". Supported language
@@ -363,6 +448,7 @@ def translate_text(
API attempts to identify the source
language automatically and returns the
source language within the response.
+
This corresponds to the ``source_language_code`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -374,7 +460,7 @@ def translate_text(
sent along with the request as metadata.
Returns:
- ~.translation_service.TranslateTextResponse:
+ google.cloud.translate_v3.types.TranslateTextResponse:
"""
# Create or coerce a protobuf request object.
@@ -450,10 +536,10 @@ def detect_language(
r"""Detects the language of text within a request.
Args:
- request (:class:`~.translation_service.DetectLanguageRequest`):
+ request (google.cloud.translate_v3.types.DetectLanguageRequest):
The request object. The request message for language
detection.
- parent (:class:`str`):
+ parent (str):
Required. Project or location to make a call. Must refer
to a caller's project.
@@ -468,10 +554,11 @@ def detect_language(
Only models within the same region (has same
location-id) can be used. Otherwise an INVALID_ARGUMENT
(400) error is returned.
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- model (:class:`str`):
+ model (str):
Optional. The language detection model to be used.
Format:
@@ -482,20 +569,23 @@ def detect_language(
``projects/{project-number-or-id}/locations/{location-id}/models/language-detection/default``.
If not specified, the default model is used.
+
This corresponds to the ``model`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- mime_type (:class:`str`):
+ mime_type (str):
Optional. The format of the source
text, for example, "text/html",
"text/plain". If left blank, the MIME
type defaults to "text/html".
+
This corresponds to the ``mime_type`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- content (:class:`str`):
+ content (str):
The content of the input stored as a
string.
+
This corresponds to the ``content`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -507,7 +597,7 @@ def detect_language(
sent along with the request as metadata.
Returns:
- ~.translation_service.DetectLanguageResponse:
+ google.cloud.translate_v3.types.DetectLanguageResponse:
The response message for language
detection.
@@ -572,10 +662,10 @@ def get_supported_languages(
translation.
Args:
- request (:class:`~.translation_service.GetSupportedLanguagesRequest`):
+ request (google.cloud.translate_v3.types.GetSupportedLanguagesRequest):
The request object. The request message for discovering
supported languages.
- parent (:class:`str`):
+ parent (str):
Required. Project or location to make a call. Must refer
to a caller's project.
@@ -591,10 +681,11 @@ def get_supported_languages(
Only models within the same region (have same
location-id) can be used, otherwise an INVALID_ARGUMENT
(400) error is returned.
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- model (:class:`str`):
+ model (str):
Optional. Get supported languages of this model.
The format depends on model type:
@@ -609,15 +700,17 @@ def get_supported_languages(
Returns languages supported by the specified model. If
missing, we get supported languages of Google general
base (PBMT) model.
+
This corresponds to the ``model`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- display_language_code (:class:`str`):
+ display_language_code (str):
Optional. The language to use to
return localized, human readable names
of supported languages. If missing, then
display names are not returned in a
response.
+
This corresponds to the ``display_language_code`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -629,7 +722,7 @@ def get_supported_languages(
sent along with the request as metadata.
Returns:
- ~.translation_service.SupportedLanguages:
+ google.cloud.translate_v3.types.SupportedLanguages:
The response message for discovering
supported languages.
@@ -696,7 +789,7 @@ def batch_translate_text(
of the call.
Args:
- request (:class:`~.translation_service.BatchTranslateTextRequest`):
+ request (google.cloud.translate_v3.types.BatchTranslateTextRequest):
The request object. The batch translation request.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
@@ -706,15 +799,13 @@ def batch_translate_text(
sent along with the request as metadata.
Returns:
- ~.operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
- The result type for the operation will be
- :class:``~.translation_service.BatchTranslateResponse``:
- Stored in the
- [google.longrunning.Operation.response][google.longrunning.Operation.response]
- field returned by BatchTranslateText if at least one
- sentence is translated successfully.
+ The result type for the operation will be :class:`google.cloud.translate_v3.types.BatchTranslateResponse`, stored in the
+ [google.longrunning.Operation.response][google.longrunning.Operation.response]
+ field returned by BatchTranslateText if at least one
+ sentence is translated successfully.
"""
# Create or coerce a protobuf request object.
@@ -764,14 +855,14 @@ def create_glossary(
Returns NOT_FOUND, if the project doesn't exist.
Args:
- request (:class:`~.translation_service.CreateGlossaryRequest`):
+ request (google.cloud.translate_v3.types.CreateGlossaryRequest):
The request object. Request message for CreateGlossary.
- parent (:class:`str`):
+ parent (str):
Required. The project name.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- glossary (:class:`~.translation_service.Glossary`):
+ glossary (google.cloud.translate_v3.types.Glossary):
Required. The glossary to create.
This corresponds to the ``glossary`` field
on the ``request`` instance; if ``request`` is provided, this
@@ -784,12 +875,12 @@ def create_glossary(
sent along with the request as metadata.
Returns:
- ~.operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.translation_service.Glossary``: Represents a
- glossary built from user provided data.
+ :class:`google.cloud.translate_v3.types.Glossary`
+ Represents a glossary built from user provided data.
"""
# Create or coerce a protobuf request object.
@@ -854,12 +945,13 @@ def list_glossaries(
doesn't exist.
Args:
- request (:class:`~.translation_service.ListGlossariesRequest`):
+ request (google.cloud.translate_v3.types.ListGlossariesRequest):
The request object. Request message for ListGlossaries.
- parent (:class:`str`):
+ parent (str):
Required. The name of the project
from which to list all of the
glossaries.
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -871,7 +963,7 @@ def list_glossaries(
sent along with the request as metadata.
Returns:
- ~.pagers.ListGlossariesPager:
+ google.cloud.translate_v3.services.translation_service.pagers.ListGlossariesPager:
Response message for ListGlossaries.
Iterating over this object will yield
results and resolve additional pages
@@ -936,11 +1028,12 @@ def get_glossary(
exist.
Args:
- request (:class:`~.translation_service.GetGlossaryRequest`):
+ request (google.cloud.translate_v3.types.GetGlossaryRequest):
The request object. Request message for GetGlossary.
- name (:class:`str`):
+ name (str):
Required. The name of the glossary to
retrieve.
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -952,7 +1045,7 @@ def get_glossary(
sent along with the request as metadata.
Returns:
- ~.translation_service.Glossary:
+ google.cloud.translate_v3.types.Glossary:
Represents a glossary built from user
provided data.
@@ -1010,11 +1103,12 @@ def delete_glossary(
doesn't exist.
Args:
- request (:class:`~.translation_service.DeleteGlossaryRequest`):
+ request (google.cloud.translate_v3.types.DeleteGlossaryRequest):
The request object. Request message for DeleteGlossary.
- name (:class:`str`):
+ name (str):
Required. The name of the glossary to
delete.
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -1026,14 +1120,12 @@ def delete_glossary(
sent along with the request as metadata.
Returns:
- ~.operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
- The result type for the operation will be
- :class:``~.translation_service.DeleteGlossaryResponse``:
- Stored in the
- [google.longrunning.Operation.response][google.longrunning.Operation.response]
- field returned by DeleteGlossary.
+ The result type for the operation will be :class:`google.cloud.translate_v3.types.DeleteGlossaryResponse`, stored in the
+ [google.longrunning.Operation.response][google.longrunning.Operation.response]
+ field returned by DeleteGlossary.
"""
# Create or coerce a protobuf request object.
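
The docstring changes above also spell out the flattened calling convention for these methods: either pass a fully formed request message, or pass the individual fields (parent, contents, target_language_code, and so on), but not both. A minimal usage sketch of translate_text with the flattened arguments (project ID and text are placeholders; Application Default Credentials are assumed):

    from google.cloud import translate_v3

    client = translate_v3.TranslationServiceClient()

    response = client.translate_text(
        parent="projects/my-project/locations/global",  # placeholder project
        contents=["Hello, world"],
        target_language_code="fr",
        mime_type="text/plain",
    )

    for translation in response.translations:
        print(translation.translated_text)

Passing request= together with any of the flattened fields raises a ValueError, as the guards in these methods enforce.
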
diff --git a/google/cloud/translate_v3/services/translation_service/pagers.py b/google/cloud/translate_v3/services/translation_service/pagers.py
index 51347139..d6d24694 100644
--- a/google/cloud/translate_v3/services/translation_service/pagers.py
+++ b/google/cloud/translate_v3/services/translation_service/pagers.py
@@ -15,7 +15,16 @@
# limitations under the License.
#
-from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple
+from typing import (
+ Any,
+ AsyncIterable,
+ Awaitable,
+ Callable,
+ Iterable,
+ Sequence,
+ Tuple,
+ Optional,
+)
from google.cloud.translate_v3.types import translation_service
@@ -24,7 +33,7 @@ class ListGlossariesPager:
"""A pager for iterating through ``list_glossaries`` requests.
This class thinly wraps an initial
- :class:`~.translation_service.ListGlossariesResponse` object, and
+ :class:`google.cloud.translate_v3.types.ListGlossariesResponse` object, and
provides an ``__iter__`` method to iterate through its
``glossaries`` field.
@@ -33,7 +42,7 @@ class ListGlossariesPager:
through the ``glossaries`` field on the
corresponding responses.
- All the usual :class:`~.translation_service.ListGlossariesResponse`
+ All the usual :class:`google.cloud.translate_v3.types.ListGlossariesResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
@@ -51,9 +60,9 @@ def __init__(
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
- request (:class:`~.translation_service.ListGlossariesRequest`):
+ request (google.cloud.translate_v3.types.ListGlossariesRequest):
The initial request object.
- response (:class:`~.translation_service.ListGlossariesResponse`):
+ response (google.cloud.translate_v3.types.ListGlossariesResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
@@ -86,7 +95,7 @@ class ListGlossariesAsyncPager:
"""A pager for iterating through ``list_glossaries`` requests.
This class thinly wraps an initial
- :class:`~.translation_service.ListGlossariesResponse` object, and
+ :class:`google.cloud.translate_v3.types.ListGlossariesResponse` object, and
provides an ``__aiter__`` method to iterate through its
``glossaries`` field.
@@ -95,7 +104,7 @@ class ListGlossariesAsyncPager:
through the ``glossaries`` field on the
corresponding responses.
- All the usual :class:`~.translation_service.ListGlossariesResponse`
+ All the usual :class:`google.cloud.translate_v3.types.ListGlossariesResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
@@ -113,9 +122,9 @@ def __init__(
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
- request (:class:`~.translation_service.ListGlossariesRequest`):
+ request (google.cloud.translate_v3.types.ListGlossariesRequest):
The initial request object.
- response (:class:`~.translation_service.ListGlossariesResponse`):
+ response (google.cloud.translate_v3.types.ListGlossariesResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
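
The pager docstrings above describe the iteration contract: the pager wraps the initial ListGlossariesResponse, yields Glossary messages across pages, and forwards attribute lookups to the most recent response. A short sketch of how that reads at the call site (project and location are placeholders; ADC assumed):

    from google.cloud import translate_v3

    client = translate_v3.TranslationServiceClient()
    pager = client.list_glossaries(
        parent="projects/my-project/locations/us-central1"
    )

    # __iter__ transparently fetches additional pages as needed.
    for glossary in pager:
        print(glossary.name, glossary.entry_count)

    # Attributes of the most recent ListGlossariesResponse are available
    # directly on the pager.
    print(pager.next_page_token)
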
diff --git a/google/cloud/translate_v3/services/translation_service/transports/__init__.py b/google/cloud/translate_v3/services/translation_service/transports/__init__.py
index b977312d..92859637 100644
--- a/google/cloud/translate_v3/services/translation_service/transports/__init__.py
+++ b/google/cloud/translate_v3/services/translation_service/transports/__init__.py
@@ -30,7 +30,6 @@
_transport_registry["grpc"] = TranslationServiceGrpcTransport
_transport_registry["grpc_asyncio"] = TranslationServiceGrpcAsyncIOTransport
-
__all__ = (
"TranslationServiceTransport",
"TranslationServiceGrpcTransport",
diff --git a/google/cloud/translate_v3/services/translation_service/transports/base.py b/google/cloud/translate_v3/services/translation_service/transports/base.py
index 204e32ec..1107abf8 100644
--- a/google/cloud/translate_v3/services/translation_service/transports/base.py
+++ b/google/cloud/translate_v3/services/translation_service/transports/base.py
@@ -72,10 +72,10 @@ def __init__(
scope (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
your own client library.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
@@ -83,6 +83,9 @@ def __init__(
host += ":443"
self._host = host
+ # Save the scopes.
+ self._scopes = scopes or self.AUTH_SCOPES
+
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials and credentials_file:
@@ -92,20 +95,17 @@ def __init__(
if credentials_file is not None:
credentials, _ = auth.load_credentials_from_file(
- credentials_file, scopes=scopes, quota_project_id=quota_project_id
+ credentials_file, scopes=self._scopes, quota_project_id=quota_project_id
)
elif credentials is None:
credentials, _ = auth.default(
- scopes=scopes, quota_project_id=quota_project_id
+ scopes=self._scopes, quota_project_id=quota_project_id
)
# Save the credentials.
self._credentials = credentials
- # Lifted into its own function so it can be stubbed out during tests.
- self._prep_wrapped_messages(client_info)
-
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
@@ -124,6 +124,7 @@ def _prep_wrapped_messages(self, client_info):
predicate=retries.if_exception_type(
exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=client_info,
@@ -145,6 +146,7 @@ def _prep_wrapped_messages(self, client_info):
predicate=retries.if_exception_type(
exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=client_info,
@@ -158,6 +160,7 @@ def _prep_wrapped_messages(self, client_info):
predicate=retries.if_exception_type(
exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=client_info,
@@ -171,6 +174,7 @@ def _prep_wrapped_messages(self, client_info):
predicate=retries.if_exception_type(
exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=client_info,
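
Besides moving scope handling into self._scopes, the base-transport hunks add an explicit deadline=600.0 to each retry object, so retries stop once the overall 600-second budget is exhausted rather than only when the per-call default_timeout fires. An illustrative reconstruction of the resulting policy (the backoff bounds mirror the generated configuration shown elsewhere in this diff):

    from google.api_core import exceptions
    from google.api_core import retry as retries

    default_retry = retries.Retry(
        maximum=60.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(
            exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
        ),
        deadline=600.0,
    )
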
diff --git a/google/cloud/translate_v3/services/translation_service/transports/grpc.py b/google/cloud/translate_v3/services/translation_service/transports/grpc.py
index 09e06595..2e0a86b0 100644
--- a/google/cloud/translate_v3/services/translation_service/transports/grpc.py
+++ b/google/cloud/translate_v3/services/translation_service/transports/grpc.py
@@ -59,6 +59,7 @@ def __init__(
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -89,12 +90,16 @@ def __init__(
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
your own client library.
Raises:
@@ -103,79 +108,71 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._grpc_channel = None
+ self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+ self._operations_client = None
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
if channel:
- # Sanity check: Ensure that channel and credentials are not both
- # provided.
+ # Ignore credentials if a channel was passed.
credentials = False
-
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
- elif api_mtls_endpoint:
- warnings.warn(
- "api_mtls_endpoint and client_cert_source are deprecated",
- DeprecationWarning,
- )
-
- host = (
- api_mtls_endpoint
- if ":" in api_mtls_endpoint
- else api_mtls_endpoint + ":443"
- )
-
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
-
- # Create SSL credentials with client_cert_source or application
- # default SSL credentials.
- if client_cert_source:
- cert, key = client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
- else:
- ssl_credentials = SslCredentials().ssl_credentials
+ self._ssl_channel_credentials = None
- # create a new channel. The provided one is ignored.
- self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- ssl_credentials=ssl_credentials,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- )
else:
- host = host if ":" in host else host + ":443"
-
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
-
- # create a new channel. The provided one is ignored.
- self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- ssl_credentials=ssl_channel_credentials,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- )
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
- self._stubs = {} # type: Dict[str, Callable]
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
- # Run the base constructor.
+ # The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
- scopes=scopes or self.AUTH_SCOPES,
+ scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
)
+ if not self._grpc_channel:
+ self._grpc_channel = type(self).create_channel(
+ self._host,
+ credentials=self._credentials,
+ credentials_file=credentials_file,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
+
@classmethod
def create_channel(
cls,
@@ -188,7 +185,7 @@ def create_channel(
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
- address (Optionsl[str]): The host for the channel to use.
+ host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
@@ -223,12 +220,8 @@ def create_channel(
@property
def grpc_channel(self) -> grpc.Channel:
- """Create the channel designed to connect to this service.
-
- This property caches on the instance; repeated calls return
- the same channel.
+ """Return the channel designed to connect to this service.
"""
- # Return the channel from cache.
return self._grpc_channel
@property
@@ -239,13 +232,11 @@ def operations_client(self) -> operations_v1.OperationsClient:
client.
"""
# Sanity check: Only create a new client if we do not already have one.
- if "operations_client" not in self.__dict__:
- self.__dict__["operations_client"] = operations_v1.OperationsClient(
- self.grpc_channel
- )
+ if self._operations_client is None:
+ self._operations_client = operations_v1.OperationsClient(self.grpc_channel)
# Return the client from cache.
- return self.__dict__["operations_client"]
+ return self._operations_client
@property
def translate_text(
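
The rewritten constructor above defers channel creation until after the base class has resolved host, credentials, and scopes, honours an explicit channel or ssl_channel_credentials first, and only then falls back to the new client_cert_source_for_mtls callback. A sketch of supplying that callback directly to the transport (the endpoint and certificate paths are placeholders; ADC assumed):

    from google.cloud.translate_v3.services.translation_service.transports import (
        TranslationServiceGrpcTransport,
    )

    def load_client_cert():
        # Hypothetical loader returning (cert_pem, key_pem) bytes.
        with open("client.crt", "rb") as crt, open("client.key", "rb") as key:
            return crt.read(), key.read()

    transport = TranslationServiceGrpcTransport(
        host="translate.mtls.googleapis.com",  # assumed mTLS endpoint
        client_cert_source_for_mtls=load_client_cert,
    )

The resulting transport can then be passed to TranslationServiceClient(transport=transport); per the docstring above, the callback is ignored if a pre-built channel or ssl_channel_credentials is also supplied.
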
diff --git a/google/cloud/translate_v3/services/translation_service/transports/grpc_asyncio.py b/google/cloud/translate_v3/services/translation_service/transports/grpc_asyncio.py
index e7e9c05c..442b84dc 100644
--- a/google/cloud/translate_v3/services/translation_service/transports/grpc_asyncio.py
+++ b/google/cloud/translate_v3/services/translation_service/transports/grpc_asyncio.py
@@ -63,7 +63,7 @@ def create_channel(
) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
- address (Optional[str]): The host for the channel to use.
+ host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
@@ -103,6 +103,7 @@ def __init__(
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -134,12 +135,16 @@ def __init__(
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
your own client library.
Raises:
@@ -148,78 +153,70 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._grpc_channel = None
+ self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+ self._operations_client = None
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
if channel:
- # Sanity check: Ensure that channel and credentials are not both
- # provided.
+ # Ignore credentials if a channel was passed.
credentials = False
-
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
- elif api_mtls_endpoint:
- warnings.warn(
- "api_mtls_endpoint and client_cert_source are deprecated",
- DeprecationWarning,
- )
-
- host = (
- api_mtls_endpoint
- if ":" in api_mtls_endpoint
- else api_mtls_endpoint + ":443"
- )
+ self._ssl_channel_credentials = None
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
-
- # Create SSL credentials with client_cert_source or application
- # default SSL credentials.
- if client_cert_source:
- cert, key = client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
- else:
- ssl_credentials = SslCredentials().ssl_credentials
-
- # create a new channel. The provided one is ignored.
- self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- ssl_credentials=ssl_credentials,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- )
else:
- host = host if ":" in host else host + ":443"
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
-
- # create a new channel. The provided one is ignored.
- self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- ssl_credentials=ssl_channel_credentials,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- )
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
- # Run the base constructor.
+ # The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
- scopes=scopes or self.AUTH_SCOPES,
+ scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
)
- self._stubs = {}
+ if not self._grpc_channel:
+ self._grpc_channel = type(self).create_channel(
+ self._host,
+ credentials=self._credentials,
+ credentials_file=credentials_file,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
@@ -239,13 +236,13 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient:
client.
"""
# Sanity check: Only create a new client if we do not already have one.
- if "operations_client" not in self.__dict__:
- self.__dict__["operations_client"] = operations_v1.OperationsAsyncClient(
+ if self._operations_client is None:
+ self._operations_client = operations_v1.OperationsAsyncClient(
self.grpc_channel
)
# Return the client from cache.
- return self.__dict__["operations_client"]
+ return self._operations_client
@property
def translate_text(
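
The asyncio transport mirrors the synchronous one, including the deferred channel creation and the private _operations_client cache. A minimal async call through the client backed by this transport (project is a placeholder; ADC assumed; the async client is imported from the services subpackage):

    import asyncio

    from google.cloud.translate_v3.services.translation_service import (
        TranslationServiceAsyncClient,
    )

    async def main():
        client = TranslationServiceAsyncClient()
        response = await client.detect_language(
            parent="projects/my-project/locations/global",
            content="Bonjour tout le monde",
            mime_type="text/plain",
        )
        for language in response.languages:
            print(language.language_code, language.confidence)

    asyncio.run(main())
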
diff --git a/google/cloud/translate_v3/types/__init__.py b/google/cloud/translate_v3/types/__init__.py
index 60c0f41d..98fddfeb 100644
--- a/google/cloud/translate_v3/types/__init__.py
+++ b/google/cloud/translate_v3/types/__init__.py
@@ -16,62 +16,61 @@
#
from .translation_service import (
- TranslateTextGlossaryConfig,
- TranslateTextRequest,
- TranslateTextResponse,
- Translation,
- DetectLanguageRequest,
- DetectedLanguage,
- DetectLanguageResponse,
- GetSupportedLanguagesRequest,
- SupportedLanguages,
- SupportedLanguage,
- GcsSource,
- InputConfig,
- GcsDestination,
- OutputConfig,
- BatchTranslateTextRequest,
BatchTranslateMetadata,
BatchTranslateResponse,
- GlossaryInputConfig,
- Glossary,
+ BatchTranslateTextRequest,
+ CreateGlossaryMetadata,
CreateGlossaryRequest,
- GetGlossaryRequest,
+ DeleteGlossaryMetadata,
DeleteGlossaryRequest,
+ DeleteGlossaryResponse,
+ DetectedLanguage,
+ DetectLanguageRequest,
+ DetectLanguageResponse,
+ GcsDestination,
+ GcsSource,
+ GetGlossaryRequest,
+ GetSupportedLanguagesRequest,
+ Glossary,
+ GlossaryInputConfig,
+ InputConfig,
ListGlossariesRequest,
ListGlossariesResponse,
- CreateGlossaryMetadata,
- DeleteGlossaryMetadata,
- DeleteGlossaryResponse,
+ OutputConfig,
+ SupportedLanguage,
+ SupportedLanguages,
+ TranslateTextGlossaryConfig,
+ TranslateTextRequest,
+ TranslateTextResponse,
+ Translation,
)
-
__all__ = (
- "TranslateTextGlossaryConfig",
- "TranslateTextRequest",
- "TranslateTextResponse",
- "Translation",
- "DetectLanguageRequest",
- "DetectedLanguage",
- "DetectLanguageResponse",
- "GetSupportedLanguagesRequest",
- "SupportedLanguages",
- "SupportedLanguage",
- "GcsSource",
- "InputConfig",
- "GcsDestination",
- "OutputConfig",
- "BatchTranslateTextRequest",
"BatchTranslateMetadata",
"BatchTranslateResponse",
- "GlossaryInputConfig",
- "Glossary",
+ "BatchTranslateTextRequest",
+ "CreateGlossaryMetadata",
"CreateGlossaryRequest",
- "GetGlossaryRequest",
+ "DeleteGlossaryMetadata",
"DeleteGlossaryRequest",
+ "DeleteGlossaryResponse",
+ "DetectedLanguage",
+ "DetectLanguageRequest",
+ "DetectLanguageResponse",
+ "GcsDestination",
+ "GcsSource",
+ "GetGlossaryRequest",
+ "GetSupportedLanguagesRequest",
+ "Glossary",
+ "GlossaryInputConfig",
+ "InputConfig",
"ListGlossariesRequest",
"ListGlossariesResponse",
- "CreateGlossaryMetadata",
- "DeleteGlossaryMetadata",
- "DeleteGlossaryResponse",
+ "OutputConfig",
+ "SupportedLanguage",
+ "SupportedLanguages",
+ "TranslateTextGlossaryConfig",
+ "TranslateTextRequest",
+ "TranslateTextResponse",
+ "Translation",
)
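
The reordered exports above are purely alphabetical; the public surface is unchanged. These message types can still be constructed directly and passed as request= instead of using the flattened arguments, for example (placeholders throughout; ADC assumed):

    from google.cloud import translate_v3
    from google.cloud.translate_v3 import types

    request = types.TranslateTextRequest(
        parent="projects/my-project/locations/global",
        contents=["Hello"],
        target_language_code="de",
        mime_type="text/plain",
    )

    client = translate_v3.TranslationServiceClient()
    response = client.translate_text(request=request)
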
diff --git a/google/cloud/translate_v3/types/translation_service.py b/google/cloud/translate_v3/types/translation_service.py
index 9da6d491..3740856f 100644
--- a/google/cloud/translate_v3/types/translation_service.py
+++ b/google/cloud/translate_v3/types/translation_service.py
@@ -133,12 +133,12 @@ class TranslateTextRequest(proto.Message):
If missing, the system decides which google base model to
use.
- glossary_config (~.translation_service.TranslateTextGlossaryConfig):
+ glossary_config (google.cloud.translate_v3.types.TranslateTextGlossaryConfig):
Optional. Glossary to be applied. The glossary must be
within the same region (have the same location-id) as the
model, otherwise an INVALID_ARGUMENT (400) error is
returned.
- labels (Sequence[~.translation_service.TranslateTextRequest.LabelsEntry]):
+ labels (Sequence[google.cloud.translate_v3.types.TranslateTextRequest.LabelsEntry]):
Optional. The labels with user-defined
metadata for the request.
Label keys and values can be no longer than 63
@@ -165,7 +165,7 @@ class TranslateTextRequest(proto.Message):
model = proto.Field(proto.STRING, number=6)
glossary_config = proto.Field(
- proto.MESSAGE, number=7, message=TranslateTextGlossaryConfig,
+ proto.MESSAGE, number=7, message="TranslateTextGlossaryConfig",
)
labels = proto.MapField(proto.STRING, proto.STRING, number=10)
@@ -175,11 +175,11 @@ class TranslateTextResponse(proto.Message):
r"""
Attributes:
- translations (Sequence[~.translation_service.Translation]):
+ translations (Sequence[google.cloud.translate_v3.types.Translation]):
Text translation responses with no glossary applied. This
field has the same length as
[``contents``][google.cloud.translation.v3.TranslateTextRequest.contents].
- glossary_translations (Sequence[~.translation_service.Translation]):
+ glossary_translations (Sequence[google.cloud.translate_v3.types.Translation]):
Text translation responses if a glossary is provided in the
request. This can be the same as
[``translations``][google.cloud.translation.v3.TranslateTextResponse.translations]
@@ -216,7 +216,7 @@ class Translation(proto.Message):
request. If the source language was passed,
auto-detection of the language does not occur
and this field is empty.
- glossary_config (~.translation_service.TranslateTextGlossaryConfig):
+ glossary_config (google.cloud.translate_v3.types.TranslateTextGlossaryConfig):
The ``glossary_config`` used for this translation.
"""
@@ -227,7 +227,7 @@ class Translation(proto.Message):
detected_language_code = proto.Field(proto.STRING, number=4)
glossary_config = proto.Field(
- proto.MESSAGE, number=3, message=TranslateTextGlossaryConfig,
+ proto.MESSAGE, number=3, message="TranslateTextGlossaryConfig",
)
@@ -266,7 +266,7 @@ class DetectLanguageRequest(proto.Message):
Optional. The format of the source text, for
example, "text/html", "text/plain". If left
blank, the MIME type defaults to "text/html".
- labels (Sequence[~.translation_service.DetectLanguageRequest.LabelsEntry]):
+ labels (Sequence[google.cloud.translate_v3.types.DetectLanguageRequest.LabelsEntry]):
Optional. The labels with user-defined
metadata for the request.
Label keys and values can be no longer than 63
@@ -312,13 +312,15 @@ class DetectLanguageResponse(proto.Message):
r"""The response message for language detection.
Attributes:
- languages (Sequence[~.translation_service.DetectedLanguage]):
+ languages (Sequence[google.cloud.translate_v3.types.DetectedLanguage]):
A list of detected languages sorted by
detection confidence in descending order. The
most probable language first.
"""
- languages = proto.RepeatedField(proto.MESSAGE, number=1, message=DetectedLanguage,)
+ languages = proto.RepeatedField(
+ proto.MESSAGE, number=1, message="DetectedLanguage",
+ )
class GetSupportedLanguagesRequest(proto.Message):
@@ -374,7 +376,7 @@ class SupportedLanguages(proto.Message):
r"""The response message for discovering supported languages.
Attributes:
- languages (Sequence[~.translation_service.SupportedLanguage]):
+ languages (Sequence[google.cloud.translate_v3.types.SupportedLanguage]):
A list of supported language responses. This
list contains an entry for each language the
Translation API supports.
@@ -436,7 +438,7 @@ class InputConfig(proto.Message):
"text/html" is used if mime_type is missing. For ``.html``,
this field must be "text/html" or empty. For ``.txt``, this
field must be "text/plain" or empty.
- gcs_source (~.translation_service.GcsSource):
+ gcs_source (google.cloud.translate_v3.types.GcsSource):
Required. Google Cloud Storage location for the source
input. This can be a single file (for example,
``gs://translation-test/input.tsv``) or a wildcard (for
@@ -461,7 +463,7 @@ class InputConfig(proto.Message):
mime_type = proto.Field(proto.STRING, number=1)
gcs_source = proto.Field(
- proto.MESSAGE, number=2, oneof="source", message=GcsSource,
+ proto.MESSAGE, number=2, oneof="source", message="GcsSource",
)
@@ -483,7 +485,7 @@ class OutputConfig(proto.Message):
r"""Output configuration for BatchTranslateText request.
Attributes:
- gcs_destination (~.translation_service.GcsDestination):
+ gcs_destination (google.cloud.translate_v3.types.GcsDestination):
Google Cloud Storage destination for output content. For
every single input file (for example,
gs://a/b/c.[extension]), we generate at most 2 \* n output
@@ -558,7 +560,7 @@ class OutputConfig(proto.Message):
"""
gcs_destination = proto.Field(
- proto.MESSAGE, number=1, oneof="destination", message=GcsDestination,
+ proto.MESSAGE, number=1, oneof="destination", message="GcsDestination",
)
@@ -584,7 +586,7 @@ class BatchTranslateTextRequest(proto.Message):
target_language_codes (Sequence[str]):
Required. Specify up to 10 language codes
here.
- models (Sequence[~.translation_service.BatchTranslateTextRequest.ModelsEntry]):
+ models (Sequence[google.cloud.translate_v3.types.BatchTranslateTextRequest.ModelsEntry]):
Optional. The models to use for translation. Map's key is
target language code. Map's value is model name. Value can
be a built-in general model, or an AutoML Translation model.
@@ -600,21 +602,21 @@ class BatchTranslateTextRequest(proto.Message):
If the map is empty or a specific model is not requested for
a language pair, then default google model (nmt) is used.
- input_configs (Sequence[~.translation_service.InputConfig]):
+ input_configs (Sequence[google.cloud.translate_v3.types.InputConfig]):
Required. Input configurations.
The total number of files matched should be <=
1000. The total content size should be <= 100M
Unicode codepoints. The files must use UTF-8
encoding.
- output_config (~.translation_service.OutputConfig):
+ output_config (google.cloud.translate_v3.types.OutputConfig):
Required. Output configuration.
If 2 input configs match to the same file (that
is, same input path), we don't generate output
for duplicate inputs.
- glossaries (Sequence[~.translation_service.BatchTranslateTextRequest.GlossariesEntry]):
+ glossaries (Sequence[google.cloud.translate_v3.types.BatchTranslateTextRequest.GlossariesEntry]):
Optional. Glossaries to be applied for
translation. It's keyed by target language code.
- labels (Sequence[~.translation_service.BatchTranslateTextRequest.LabelsEntry]):
+ labels (Sequence[google.cloud.translate_v3.types.BatchTranslateTextRequest.LabelsEntry]):
Optional. The labels with user-defined
metadata for the request.
Label keys and values can be no longer than 63
@@ -636,12 +638,12 @@ class BatchTranslateTextRequest(proto.Message):
models = proto.MapField(proto.STRING, proto.STRING, number=4)
- input_configs = proto.RepeatedField(proto.MESSAGE, number=5, message=InputConfig,)
+ input_configs = proto.RepeatedField(proto.MESSAGE, number=5, message="InputConfig",)
- output_config = proto.Field(proto.MESSAGE, number=6, message=OutputConfig,)
+ output_config = proto.Field(proto.MESSAGE, number=6, message="OutputConfig",)
glossaries = proto.MapField(
- proto.STRING, proto.MESSAGE, number=7, message=TranslateTextGlossaryConfig,
+ proto.STRING, proto.MESSAGE, number=7, message="TranslateTextGlossaryConfig",
)
labels = proto.MapField(proto.STRING, proto.STRING, number=9)
@@ -651,7 +653,7 @@ class BatchTranslateMetadata(proto.Message):
r"""State metadata for the batch translation operation.
Attributes:
- state (~.translation_service.BatchTranslateMetadata.State):
+ state (google.cloud.translate_v3.types.BatchTranslateMetadata.State):
The state of the operation.
translated_characters (int):
Number of successfully translated characters
@@ -665,7 +667,7 @@ class BatchTranslateMetadata(proto.Message):
codepoints from input files times the number of
target languages and appears here shortly after
the call is submitted.
- submit_time (~.timestamp.Timestamp):
+ submit_time (google.protobuf.timestamp_pb2.Timestamp):
Time when the operation was submitted.
"""
@@ -705,9 +707,9 @@ class BatchTranslateResponse(proto.Message):
failed_characters (int):
Number of characters that have failed to
process (Unicode codepoints).
- submit_time (~.timestamp.Timestamp):
+ submit_time (google.protobuf.timestamp_pb2.Timestamp):
Time when the operation was submitted.
- end_time (~.timestamp.Timestamp):
+ end_time (google.protobuf.timestamp_pb2.Timestamp):
The time when the operation is finished and
[google.longrunning.Operation.done][google.longrunning.Operation.done]
is set to true.
@@ -728,7 +730,7 @@ class GlossaryInputConfig(proto.Message):
r"""Input configuration for glossaries.
Attributes:
- gcs_source (~.translation_service.GcsSource):
+ gcs_source (google.cloud.translate_v3.types.GcsSource):
Required. Google Cloud Storage location of glossary data.
File format is determined based on the filename extension.
API returns [google.rpc.Code.INVALID_ARGUMENT] for
@@ -757,7 +759,7 @@ class GlossaryInputConfig(proto.Message):
"""
gcs_source = proto.Field(
- proto.MESSAGE, number=1, oneof="source", message=GcsSource,
+ proto.MESSAGE, number=1, oneof="source", message="GcsSource",
)
@@ -769,20 +771,20 @@ class Glossary(proto.Message):
Required. The resource name of the glossary. Glossary names
have the form
``projects/{project-number-or-id}/locations/{location-id}/glossaries/{glossary-id}``.
- language_pair (~.translation_service.Glossary.LanguageCodePair):
+ language_pair (google.cloud.translate_v3.types.Glossary.LanguageCodePair):
Used with unidirectional glossaries.
- language_codes_set (~.translation_service.Glossary.LanguageCodesSet):
+ language_codes_set (google.cloud.translate_v3.types.Glossary.LanguageCodesSet):
Used with equivalent term set glossaries.
- input_config (~.translation_service.GlossaryInputConfig):
+ input_config (google.cloud.translate_v3.types.GlossaryInputConfig):
Required. Provides examples to build the
glossary from. Total glossary must not exceed
10M Unicode codepoints.
entry_count (int):
Output only. The number of entries defined in
the glossary.
- submit_time (~.timestamp.Timestamp):
+ submit_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. When CreateGlossary was called.
- end_time (~.timestamp.Timestamp):
+ end_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. When the glossary creation was
finished.
"""
@@ -828,7 +830,7 @@ class LanguageCodesSet(proto.Message):
proto.MESSAGE, number=4, oneof="languages", message=LanguageCodesSet,
)
- input_config = proto.Field(proto.MESSAGE, number=5, message=GlossaryInputConfig,)
+ input_config = proto.Field(proto.MESSAGE, number=5, message="GlossaryInputConfig",)
entry_count = proto.Field(proto.INT32, number=6)
@@ -843,13 +845,13 @@ class CreateGlossaryRequest(proto.Message):
Attributes:
parent (str):
Required. The project name.
- glossary (~.translation_service.Glossary):
+ glossary (google.cloud.translate_v3.types.Glossary):
Required. The glossary to create.
"""
parent = proto.Field(proto.STRING, number=1)
- glossary = proto.Field(proto.MESSAGE, number=2, message=Glossary,)
+ glossary = proto.Field(proto.MESSAGE, number=2, message="Glossary",)
class GetGlossaryRequest(proto.Message):
@@ -913,7 +915,7 @@ class ListGlossariesResponse(proto.Message):
r"""Response message for ListGlossaries.
Attributes:
- glossaries (Sequence[~.translation_service.Glossary]):
+ glossaries (Sequence[google.cloud.translate_v3.types.Glossary]):
The list of glossaries for a project.
next_page_token (str):
A token to retrieve a page of results. Pass this value in
@@ -926,7 +928,7 @@ class ListGlossariesResponse(proto.Message):
def raw_page(self):
return self
- glossaries = proto.RepeatedField(proto.MESSAGE, number=1, message=Glossary,)
+ glossaries = proto.RepeatedField(proto.MESSAGE, number=1, message="Glossary",)
next_page_token = proto.Field(proto.STRING, number=2)
@@ -940,10 +942,10 @@ class CreateGlossaryMetadata(proto.Message):
name (str):
The name of the glossary that is being
created.
- state (~.translation_service.CreateGlossaryMetadata.State):
+ state (google.cloud.translate_v3.types.CreateGlossaryMetadata.State):
The current state of the glossary creation
operation.
- submit_time (~.timestamp.Timestamp):
+ submit_time (google.protobuf.timestamp_pb2.Timestamp):
The time when the operation was submitted to
the server.
"""
@@ -975,10 +977,10 @@ class DeleteGlossaryMetadata(proto.Message):
name (str):
The name of the glossary that is being
deleted.
- state (~.translation_service.DeleteGlossaryMetadata.State):
+ state (google.cloud.translate_v3.types.DeleteGlossaryMetadata.State):
The current state of the glossary deletion
operation.
- submit_time (~.timestamp.Timestamp):
+ submit_time (google.protobuf.timestamp_pb2.Timestamp):
The time when the operation was submitted to
the server.
"""
@@ -1009,10 +1011,10 @@ class DeleteGlossaryResponse(proto.Message):
Attributes:
name (str):
The name of the deleted glossary.
- submit_time (~.timestamp.Timestamp):
+ submit_time (google.protobuf.timestamp_pb2.Timestamp):
The time when the operation was submitted to
the server.
- end_time (~.timestamp.Timestamp):
+ end_time (google.protobuf.timestamp_pb2.Timestamp):
The time when the glossary deletion is finished and
[google.longrunning.Operation.done][google.longrunning.Operation.done]
is set to true.
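
The recurring change in this file is switching proto.Field(..., message=SomeMessage) to the string form message="SomeMessage". A stand-alone proto-plus sketch (not taken from this library) of why the string form is preferred: references are resolved lazily, so a field can name a message defined later in the module and declaration order stops mattering.

    import proto

    class Outer(proto.Message):
        # "Inner" is resolved by name even though Inner is defined below.
        inner = proto.Field(proto.MESSAGE, number=1, message="Inner")

    class Inner(proto.Message):
        value = proto.Field(proto.STRING, number=1)

    outer = Outer(inner=Inner(value="example"))
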
diff --git a/google/cloud/translate_v3beta1/__init__.py b/google/cloud/translate_v3beta1/__init__.py
index b7cd2fe0..80a31e78 100644
--- a/google/cloud/translate_v3beta1/__init__.py
+++ b/google/cloud/translate_v3beta1/__init__.py
@@ -16,6 +16,11 @@
#
from .services.translation_service import TranslationServiceClient
+from .types.translation_service import BatchDocumentInputConfig
+from .types.translation_service import BatchDocumentOutputConfig
+from .types.translation_service import BatchTranslateDocumentMetadata
+from .types.translation_service import BatchTranslateDocumentRequest
+from .types.translation_service import BatchTranslateDocumentResponse
from .types.translation_service import BatchTranslateMetadata
from .types.translation_service import BatchTranslateResponse
from .types.translation_service import BatchTranslateTextRequest
@@ -27,6 +32,9 @@
from .types.translation_service import DetectLanguageRequest
from .types.translation_service import DetectLanguageResponse
from .types.translation_service import DetectedLanguage
+from .types.translation_service import DocumentInputConfig
+from .types.translation_service import DocumentOutputConfig
+from .types.translation_service import DocumentTranslation
from .types.translation_service import GcsDestination
from .types.translation_service import GcsSource
from .types.translation_service import GetGlossaryRequest
@@ -39,6 +47,8 @@
from .types.translation_service import OutputConfig
from .types.translation_service import SupportedLanguage
from .types.translation_service import SupportedLanguages
+from .types.translation_service import TranslateDocumentRequest
+from .types.translation_service import TranslateDocumentResponse
from .types.translation_service import TranslateTextGlossaryConfig
from .types.translation_service import TranslateTextRequest
from .types.translation_service import TranslateTextResponse
@@ -46,6 +56,11 @@
__all__ = (
+ "BatchDocumentInputConfig",
+ "BatchDocumentOutputConfig",
+ "BatchTranslateDocumentMetadata",
+ "BatchTranslateDocumentRequest",
+ "BatchTranslateDocumentResponse",
"BatchTranslateMetadata",
"BatchTranslateResponse",
"BatchTranslateTextRequest",
@@ -57,6 +72,9 @@
"DetectLanguageRequest",
"DetectLanguageResponse",
"DetectedLanguage",
+ "DocumentInputConfig",
+ "DocumentOutputConfig",
+ "DocumentTranslation",
"GcsDestination",
"GcsSource",
"GetGlossaryRequest",
@@ -69,6 +87,8 @@
"OutputConfig",
"SupportedLanguage",
"SupportedLanguages",
+ "TranslateDocumentRequest",
+ "TranslateDocumentResponse",
"TranslateTextGlossaryConfig",
"TranslateTextRequest",
"TranslateTextResponse",
diff --git a/google/cloud/translate_v3beta1/services/translation_service/async_client.py b/google/cloud/translate_v3beta1/services/translation_service/async_client.py
index 9054a09b..f4b201cf 100644
--- a/google/cloud/translate_v3beta1/services/translation_service/async_client.py
+++ b/google/cloud/translate_v3beta1/services/translation_service/async_client.py
@@ -50,9 +50,76 @@ class TranslationServiceAsyncClient:
glossary_path = staticmethod(TranslationServiceClient.glossary_path)
parse_glossary_path = staticmethod(TranslationServiceClient.parse_glossary_path)
- from_service_account_file = TranslationServiceClient.from_service_account_file
+ common_billing_account_path = staticmethod(
+ TranslationServiceClient.common_billing_account_path
+ )
+ parse_common_billing_account_path = staticmethod(
+ TranslationServiceClient.parse_common_billing_account_path
+ )
+
+ common_folder_path = staticmethod(TranslationServiceClient.common_folder_path)
+ parse_common_folder_path = staticmethod(
+ TranslationServiceClient.parse_common_folder_path
+ )
+
+ common_organization_path = staticmethod(
+ TranslationServiceClient.common_organization_path
+ )
+ parse_common_organization_path = staticmethod(
+ TranslationServiceClient.parse_common_organization_path
+ )
+
+ common_project_path = staticmethod(TranslationServiceClient.common_project_path)
+ parse_common_project_path = staticmethod(
+ TranslationServiceClient.parse_common_project_path
+ )
+
+ common_location_path = staticmethod(TranslationServiceClient.common_location_path)
+ parse_common_location_path = staticmethod(
+ TranslationServiceClient.parse_common_location_path
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ TranslationServiceAsyncClient: The constructed client.
+ """
+ return TranslationServiceClient.from_service_account_info.__func__(TranslationServiceAsyncClient, info, *args, **kwargs) # type: ignore
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ TranslationServiceAsyncClient: The constructed client.
+ """
+ return TranslationServiceClient.from_service_account_file.__func__(TranslationServiceAsyncClient, filename, *args, **kwargs) # type: ignore
+
from_service_account_json = from_service_account_file
+ @property
+ def transport(self) -> TranslationServiceTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ TranslationServiceTransport: The transport used by the client instance.
+ """
+ return self._client.transport
+
get_transport_class = functools.partial(
type(TranslationServiceClient).get_transport_class,
type(TranslationServiceClient),
@@ -117,7 +184,7 @@ async def translate_text(
r"""Translates input text and returns translated text.
Args:
- request (:class:`~.translation_service.TranslateTextRequest`):
+ request (:class:`google.cloud.translate_v3beta1.types.TranslateTextRequest`):
The request object. The request message for synchronous
translation.
@@ -128,7 +195,7 @@ async def translate_text(
sent along with the request as metadata.
Returns:
- ~.translation_service.TranslateTextResponse:
+ google.cloud.translate_v3beta1.types.TranslateTextResponse:
"""
# Create or coerce a protobuf request object.
@@ -169,7 +236,7 @@ async def detect_language(
r"""Detects the language of text within a request.
Args:
- request (:class:`~.translation_service.DetectLanguageRequest`):
+ request (:class:`google.cloud.translate_v3beta1.types.DetectLanguageRequest`):
The request object. The request message for language
detection.
parent (:class:`str`):
@@ -177,16 +244,17 @@ async def detect_language(
to a caller's project.
Format:
- ``projects/{project-id}/locations/{location-id}`` or
- ``projects/{project-id}``.
+ ``projects/{project-number-or-id}/locations/{location-id}``
+ or ``projects/{project-number-or-id}``.
For global calls, use
- ``projects/{project-id}/locations/global`` or
- ``projects/{project-id}``.
+ ``projects/{project-number-or-id}/locations/global`` or
+ ``projects/{project-number-or-id}``.
- Only models within the same region (has same
- location-id) can be used. Otherwise an INVALID_ARGUMENT
+ Only models within the same region, which have the same
+ location-id, can be used. Otherwise an INVALID_ARGUMENT
(400) error is returned.
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -194,13 +262,14 @@ async def detect_language(
Optional. The language detection model to be used.
Format:
- ``projects/{project-id}/locations/{location-id}/models/language-detection/{model-id}``
+ ``projects/{project-number-or-id}/locations/{location-id}/models/language-detection/{model-id}``
Only one language detection model is currently
supported:
- ``projects/{project-id}/locations/{location-id}/models/language-detection/default``.
+ ``projects/{project-number-or-id}/locations/{location-id}/models/language-detection/default``.
If not specified, the default model is used.
+
This corresponds to the ``model`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -209,6 +278,7 @@ async def detect_language(
text, for example, "text/html",
"text/plain". If left blank, the MIME
type defaults to "text/html".
+
This corresponds to the ``mime_type`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -220,7 +290,7 @@ async def detect_language(
sent along with the request as metadata.
Returns:
- ~.translation_service.DetectLanguageResponse:
+ google.cloud.translate_v3beta1.types.DetectLanguageResponse:
The response message for language
detection.
@@ -228,7 +298,8 @@ async def detect_language(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent, model, mime_type]):
+ has_flattened_params = any([parent, model, mime_type])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -281,25 +352,26 @@ async def get_supported_languages(
translation.
Args:
- request (:class:`~.translation_service.GetSupportedLanguagesRequest`):
+ request (:class:`google.cloud.translate_v3beta1.types.GetSupportedLanguagesRequest`):
The request object. The request message for discovering
supported languages.
parent (:class:`str`):
Required. Project or location to make a call. Must refer
to a caller's project.
- Format: ``projects/{project-id}`` or
- ``projects/{project-id}/locations/{location-id}``.
+ Format: ``projects/{project-number-or-id}`` or
+ ``projects/{project-number-or-id}/locations/{location-id}``.
For global calls, use
- ``projects/{project-id}/locations/global`` or
- ``projects/{project-id}``.
+ ``projects/{project-number-or-id}/locations/global`` or
+ ``projects/{project-number-or-id}``.
Non-global location is required for AutoML models.
Only models within the same region (have same
location-id) can be used, otherwise an INVALID_ARGUMENT
(400) error is returned.
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -309,6 +381,7 @@ async def get_supported_languages(
of supported languages. If missing, then
display names are not returned in a
response.
+
This corresponds to the ``display_language_code`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -318,15 +391,16 @@ async def get_supported_languages(
The format depends on model type:
- AutoML Translation models:
- ``projects/{project-id}/locations/{location-id}/models/{model-id}``
+ ``projects/{project-number-or-id}/locations/{location-id}/models/{model-id}``
- General (built-in) models:
- ``projects/{project-id}/locations/{location-id}/models/general/nmt``,
- ``projects/{project-id}/locations/{location-id}/models/general/base``
+ ``projects/{project-number-or-id}/locations/{location-id}/models/general/nmt``,
+ ``projects/{project-number-or-id}/locations/{location-id}/models/general/base``
Returns languages supported by the specified model. If
missing, we get supported languages of Google general
base (PBMT) model.
+
This corresponds to the ``model`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -338,7 +412,7 @@ async def get_supported_languages(
sent along with the request as metadata.
Returns:
- ~.translation_service.SupportedLanguages:
+ google.cloud.translate_v3beta1.types.SupportedLanguages:
The response message for discovering
supported languages.
@@ -346,7 +420,8 @@ async def get_supported_languages(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent, display_language_code, model]):
+ has_flattened_params = any([parent, display_language_code, model])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -373,8 +448,9 @@ async def get_supported_languages(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -392,6 +468,56 @@ async def get_supported_languages(
# Done; return the response.
return response
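The `has_flattened_params` guard introduced above enforces a rule that applies to every method on this surface: callers pass either a fully formed request object or the flattened keyword fields, never both. A minimal sketch, shown with the synchronous client for brevity (the async client enforces the same rule); the project and location names are placeholders:

```python
from google.cloud import translate_v3beta1

client = translate_v3beta1.TranslationServiceClient()
parent = "projects/my-project/locations/global"  # placeholder

# Flattened fields are convenient for simple calls...
languages = client.get_supported_languages(parent=parent)

# ...but mixing them with a request object raises ValueError.
try:
    client.get_supported_languages(
        request={"parent": parent}, display_language_code="en"
    )
except ValueError as exc:
    print(exc)  # "If the `request` argument is set, then none of ..."
```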
+ async def translate_document(
+ self,
+ request: translation_service.TranslateDocumentRequest = None,
+ *,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> translation_service.TranslateDocumentResponse:
+ r"""Translates documents in synchronous mode.
+
+ Args:
+ request (:class:`google.cloud.translate_v3beta1.types.TranslateDocumentRequest`):
+ The request object. A document translation request.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.translate_v3beta1.types.TranslateDocumentResponse:
+ A translated document response
+ message.
+
+ """
+ # Create or coerce a protobuf request object.
+
+ request = translation_service.TranslateDocumentRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.translate_document,
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
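A hedged usage sketch for the new asynchronous `translate_document` call. The request and response field names (`document_input_config` with inline `content` and a `mime_type`) are assumed from the v3beta1 protos and are not shown in this diff:

```python
import asyncio

from google.cloud import translate_v3beta1


async def translate_pdf(pdf_bytes: bytes, project_id: str):
    client = translate_v3beta1.TranslationServiceAsyncClient()
    response = await client.translate_document(
        request={
            "parent": f"projects/{project_id}/locations/global",
            "target_language_code": "fr",
            # Assumed proto shape: inline document bytes plus a MIME type.
            "document_input_config": {
                "content": pdf_bytes,
                "mime_type": "application/pdf",
            },
        }
    )
    return response


# asyncio.run(translate_pdf(open("contract.pdf", "rb").read(), "my-project"))
```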
async def batch_translate_text(
self,
request: translation_service.BatchTranslateTextRequest = None,
@@ -411,7 +537,7 @@ async def batch_translate_text(
of the call.
Args:
- request (:class:`~.translation_service.BatchTranslateTextRequest`):
+ request (:class:`google.cloud.translate_v3beta1.types.BatchTranslateTextRequest`):
The request object. The batch translation request.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
@@ -421,15 +547,13 @@ async def batch_translate_text(
sent along with the request as metadata.
Returns:
- ~.operation_async.AsyncOperation:
+ google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
- The result type for the operation will be
- :class:``~.translation_service.BatchTranslateResponse``:
- Stored in the
- [google.longrunning.Operation.response][google.longrunning.Operation.response]
- field returned by BatchTranslateText if at least one
- sentence is translated successfully.
+ The result type for the operation will be :class:`google.cloud.translate_v3beta1.types.BatchTranslateResponse` Stored in the
+ [google.longrunning.Operation.response][google.longrunning.Operation.response]
+ field returned by BatchTranslateText if at least one
+ sentence is translated successfully.
"""
# Create or coerce a protobuf request object.
@@ -464,6 +588,76 @@ async def batch_translate_text(
# Done; return the response.
return response
+ async def batch_translate_document(
+ self,
+ request: translation_service.BatchTranslateDocumentRequest = None,
+ *,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Translates a large volume of documents in
+ asynchronous batch mode. This function provides real-
+ time output as the inputs are being processed. If caller
+ cancels a request, the partial results (for an input
+ file, it's all or nothing) may still be available on the
+ specified output location.
+ This call returns immediately and you can use
+ google.longrunning.Operation.name to poll the status of
+ the call.
+
+ Args:
+ request (:class:`google.cloud.translate_v3beta1.types.BatchTranslateDocumentRequest`):
+ The request object. The BatchTranslateDocument request.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be :class:`google.cloud.translate_v3beta1.types.BatchTranslateDocumentResponse` Stored in the
+ [google.longrunning.Operation.response][google.longrunning.Operation.response]
+ field returned by BatchTranslateDocument if at least
+ one document is translated successfully.
+
+ """
+ # Create or coerce a protobuf request object.
+
+ request = translation_service.BatchTranslateDocumentRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.batch_translate_document,
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ translation_service.BatchTranslateDocumentResponse,
+ metadata_type=translation_service.BatchTranslateDocumentMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
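Because `batch_translate_document` returns an `AsyncOperation`, the response is obtained by awaiting the operation's result. The GCS field names below (`input_configs`/`gcs_source`, `output_config`/`gcs_destination`) are assumptions based on the v3beta1 protos, not part of this diff:

```python
from google.cloud import translate_v3beta1


async def batch_translate_docs(project_id: str):
    client = translate_v3beta1.TranslationServiceAsyncClient()
    operation = await client.batch_translate_document(
        request={
            "parent": f"projects/{project_id}/locations/us-central1",
            "source_language_code": "en",
            "target_language_codes": ["de", "ja"],
            "input_configs": [
                {"gcs_source": {"input_uri": "gs://my-bucket/docs/*"}}
            ],
            "output_config": {
                "gcs_destination": {"output_uri_prefix": "gs://my-bucket/out/"}
            },
        }
    )
    # Awaiting result() resolves once the long-running operation completes,
    # or re-raises the stored error on failure.
    return await operation.result()
```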
async def create_glossary(
self,
request: translation_service.CreateGlossaryRequest = None,
@@ -478,14 +672,14 @@ async def create_glossary(
Returns NOT_FOUND, if the project doesn't exist.
Args:
- request (:class:`~.translation_service.CreateGlossaryRequest`):
+ request (:class:`google.cloud.translate_v3beta1.types.CreateGlossaryRequest`):
The request object. Request message for CreateGlossary.
parent (:class:`str`):
Required. The project name.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- glossary (:class:`~.translation_service.Glossary`):
+ glossary (:class:`google.cloud.translate_v3beta1.types.Glossary`):
Required. The glossary to create.
This corresponds to the ``glossary`` field
on the ``request`` instance; if ``request`` is provided, this
@@ -498,18 +692,19 @@ async def create_glossary(
sent along with the request as metadata.
Returns:
- ~.operation_async.AsyncOperation:
+ google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.translation_service.Glossary``: Represents a
- glossary built from user provided data.
+ :class:`google.cloud.translate_v3beta1.types.Glossary`
+ Represents a glossary built from user provided data.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent, glossary]):
+ has_flattened_params = any([parent, glossary])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -567,21 +762,47 @@ async def list_glossaries(
doesn't exist.
Args:
- request (:class:`~.translation_service.ListGlossariesRequest`):
+ request (:class:`google.cloud.translate_v3beta1.types.ListGlossariesRequest`):
The request object. Request message for ListGlossaries.
parent (:class:`str`):
Required. The name of the project
from which to list all of the
glossaries.
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
filter (:class:`str`):
Optional. Filter specifying
- constraints of a list operation.
- Filtering is not supported yet, and the
- parameter currently has no effect. If
- missing, no filtering is performed.
+ constraints of a list operation. Specify
+ the constraint by the format of
+ "key=value", where key must be "src" or
+ "tgt", and the value must be a valid
+ language code. For multiple
+ restrictions, concatenate them by "AND"
+ (uppercase only), such as: "src=en-US
+ AND tgt=zh-CN". Notice that the exact
+ match is used here, which means using
+ 'en-US' and 'en' can lead to different
+ results, which depends on the language
+ code you used when you create the
+ glossary. For the unidirectional
+ glossaries, the "src" and "tgt" add
+ restrictions on the source and target
+ language code separately. For the
+ equivalent term set glossaries, the
+ "src" and/or "tgt" add restrictions on
+ the term set.
+ For example: "src=en-US AND tgt=zh-CN"
+ will only pick the unidirectional
+ glossaries which exactly match the
+ source language code as "en-US" and the
+ target language code "zh-CN", but all
+ equivalent term set glossaries which
+ contain "en-US" and "zh-CN" in their
+ language set will be picked. If missing,
+ no filtering is performed.
+
This corresponds to the ``filter`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
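The newly documented filter syntax can be exercised through the flattened `filter` argument; note that language codes are matched exactly, so "en-US" and "en" are different keys. A short sketch with the synchronous client (resource names are placeholders):

```python
from google.cloud import translate_v3beta1

client = translate_v3beta1.TranslationServiceClient()
parent = "projects/my-project/locations/us-central1"  # placeholder

# Unidirectional glossaries translating US English into Simplified Chinese.
for glossary in client.list_glossaries(parent=parent, filter="src=en-US AND tgt=zh-CN"):
    print(glossary.name)
```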
@@ -593,7 +814,7 @@ async def list_glossaries(
sent along with the request as metadata.
Returns:
- ~.pagers.ListGlossariesAsyncPager:
+ google.cloud.translate_v3beta1.services.translation_service.pagers.ListGlossariesAsyncPager:
Response message for ListGlossaries.
Iterating over this object will yield
results and resolve additional pages
@@ -603,7 +824,8 @@ async def list_glossaries(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent, filter]):
+ has_flattened_params = any([parent, filter])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -628,8 +850,9 @@ async def list_glossaries(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -666,11 +889,12 @@ async def get_glossary(
exist.
Args:
- request (:class:`~.translation_service.GetGlossaryRequest`):
+ request (:class:`google.cloud.translate_v3beta1.types.GetGlossaryRequest`):
The request object. Request message for GetGlossary.
name (:class:`str`):
Required. The name of the glossary to
retrieve.
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -682,7 +906,7 @@ async def get_glossary(
sent along with the request as metadata.
Returns:
- ~.translation_service.Glossary:
+ google.cloud.translate_v3beta1.types.Glossary:
Represents a glossary built from user
provided data.
@@ -690,7 +914,8 @@ async def get_glossary(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -713,8 +938,9 @@ async def get_glossary(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -746,11 +972,12 @@ async def delete_glossary(
doesn't exist.
Args:
- request (:class:`~.translation_service.DeleteGlossaryRequest`):
+ request (:class:`google.cloud.translate_v3beta1.types.DeleteGlossaryRequest`):
The request object. Request message for DeleteGlossary.
name (:class:`str`):
Required. The name of the glossary to
delete.
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -762,20 +989,19 @@ async def delete_glossary(
sent along with the request as metadata.
Returns:
- ~.operation_async.AsyncOperation:
+ google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
- The result type for the operation will be
- :class:``~.translation_service.DeleteGlossaryResponse``:
- Stored in the
- [google.longrunning.Operation.response][google.longrunning.Operation.response]
- field returned by DeleteGlossary.
+ The result type for the operation will be :class:`google.cloud.translate_v3beta1.types.DeleteGlossaryResponse` Stored in the
+ [google.longrunning.Operation.response][google.longrunning.Operation.response]
+ field returned by DeleteGlossary.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -798,8 +1024,9 @@ async def delete_glossary(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=DEFAULT_CLIENT_INFO,
diff --git a/google/cloud/translate_v3beta1/services/translation_service/client.py b/google/cloud/translate_v3beta1/services/translation_service/client.py
index 47a82cca..f2623a3b 100644
--- a/google/cloud/translate_v3beta1/services/translation_service/client.py
+++ b/google/cloud/translate_v3beta1/services/translation_service/client.py
@@ -115,6 +115,22 @@ def _get_default_mtls_endpoint(api_endpoint):
DEFAULT_ENDPOINT
)
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ TranslationServiceClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
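A brief sketch of the new `from_service_account_info` constructor, which accepts key material already held in memory (for example, fetched from a secret manager). The helper and variable names here are hypothetical:

```python
import json

from google.cloud import translate_v3beta1


def client_from_secret(secret_payload: str) -> translate_v3beta1.TranslationServiceClient:
    """Build a client from service-account key JSON that never touches disk."""
    info = json.loads(secret_payload)
    return translate_v3beta1.TranslationServiceClient.from_service_account_info(info)
```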
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
@@ -127,7 +143,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
kwargs: Additional arguments to pass to the constructor.
Returns:
- {@api.name}: The constructed client.
+ TranslationServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
@@ -135,6 +151,15 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
from_service_account_json = from_service_account_file
+ @property
+ def transport(self) -> TranslationServiceTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ TranslationServiceTransport: The transport used by the client instance.
+ """
+ return self._transport
+
@staticmethod
def glossary_path(project: str, location: str, glossary: str,) -> str:
"""Return a fully-qualified glossary string."""
@@ -151,6 +176,65 @@ def parse_glossary_path(path: str) -> Dict[str, str]:
)
return m.groupdict() if m else {}
+ @staticmethod
+ def common_billing_account_path(billing_account: str,) -> str:
+ """Return a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(folder: str,) -> str:
+ """Return a fully-qualified folder string."""
+ return "folders/{folder}".format(folder=folder,)
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(organization: str,) -> str:
+ """Return a fully-qualified organization string."""
+ return "organizations/{organization}".format(organization=organization,)
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse a organization path into its component segments."""
+ m = re.match(r"^organizations/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(project: str,) -> str:
+ """Return a fully-qualified project string."""
+ return "projects/{project}".format(project=project,)
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(project: str, location: str,) -> str:
+ """Return a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
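The common resource-path helpers added above are plain class-level utilities; building and parsing round-trip cleanly:

```python
from google.cloud import translate_v3beta1

Client = translate_v3beta1.TranslationServiceClient

location = Client.common_location_path("my-project", "us-central1")
assert location == "projects/my-project/locations/us-central1"
assert Client.parse_common_location_path(location) == {
    "project": "my-project",
    "location": "us-central1",
}
```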
def __init__(
self,
*,
@@ -167,10 +251,10 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
- transport (Union[str, ~.TranslationServiceTransport]): The
+ transport (Union[str, TranslationServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
- client_options (client_options_lib.ClientOptions): Custom options for the
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
@@ -186,10 +270,10 @@ def __init__(
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
your own client library.
Raises:
@@ -206,21 +290,17 @@ def __init__(
util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
)
- ssl_credentials = None
+ client_cert_source_func = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
- import grpc # type: ignore
-
- cert, key = client_options.client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
is_mtls = True
+ client_cert_source_func = client_options.client_cert_source
else:
- creds = SslCredentials()
- is_mtls = creds.is_mtls
- ssl_credentials = creds.ssl_credentials if is_mtls else None
+ is_mtls = mtls.has_default_client_cert_source()
+ client_cert_source_func = (
+ mtls.default_client_cert_source() if is_mtls else None
+ )
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
@@ -263,7 +343,7 @@ def __init__(
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
- ssl_channel_credentials=ssl_credentials,
+ client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
)
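With this change the client no longer builds `grpc.ssl_channel_credentials` itself; it forwards a certificate callback to the transport as `client_cert_source_for_mtls`. A hedged sketch of how a caller would opt in (the certificate file paths are placeholders, and `GOOGLE_API_USE_CLIENT_CERTIFICATE=true` must be set in the environment):

```python
from google.api_core.client_options import ClientOptions
from google.cloud import translate_v3beta1


def load_client_cert():
    # Hypothetical callback returning (cert_bytes, key_bytes), both PEM-encoded.
    with open("client.crt", "rb") as crt, open("client.key", "rb") as key:
        return crt.read(), key.read()


options = ClientOptions(client_cert_source=load_client_cert)
client = translate_v3beta1.TranslationServiceClient(client_options=options)
```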
@@ -279,7 +359,7 @@ def translate_text(
r"""Translates input text and returns translated text.
Args:
- request (:class:`~.translation_service.TranslateTextRequest`):
+ request (google.cloud.translate_v3beta1.types.TranslateTextRequest):
The request object. The request message for synchronous
translation.
@@ -290,7 +370,7 @@ def translate_text(
sent along with the request as metadata.
Returns:
- ~.translation_service.TranslateTextResponse:
+ google.cloud.translate_v3beta1.types.TranslateTextResponse:
"""
# Create or coerce a protobuf request object.
@@ -332,46 +412,49 @@ def detect_language(
r"""Detects the language of text within a request.
Args:
- request (:class:`~.translation_service.DetectLanguageRequest`):
+ request (google.cloud.translate_v3beta1.types.DetectLanguageRequest):
The request object. The request message for language
detection.
- parent (:class:`str`):
+ parent (str):
Required. Project or location to make a call. Must refer
to a caller's project.
Format:
- ``projects/{project-id}/locations/{location-id}`` or
- ``projects/{project-id}``.
+ ``projects/{project-number-or-id}/locations/{location-id}``
+ or ``projects/{project-number-or-id}``.
For global calls, use
- ``projects/{project-id}/locations/global`` or
- ``projects/{project-id}``.
+ ``projects/{project-number-or-id}/locations/global`` or
+ ``projects/{project-number-or-id}``.
- Only models within the same region (has same
- location-id) can be used. Otherwise an INVALID_ARGUMENT
+ Only models within the same region, which have the same
+ location-id, can be used. Otherwise an INVALID_ARGUMENT
(400) error is returned.
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- model (:class:`str`):
+ model (str):
Optional. The language detection model to be used.
Format:
- ``projects/{project-id}/locations/{location-id}/models/language-detection/{model-id}``
+ ``projects/{project-number-or-id}/locations/{location-id}/models/language-detection/{model-id}``
Only one language detection model is currently
supported:
- ``projects/{project-id}/locations/{location-id}/models/language-detection/default``.
+ ``projects/{project-number-or-id}/locations/{location-id}/models/language-detection/default``.
If not specified, the default model is used.
+
This corresponds to the ``model`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- mime_type (:class:`str`):
+ mime_type (str):
Optional. The format of the source
text, for example, "text/html",
"text/plain". If left blank, the MIME
type defaults to "text/html".
+
This corresponds to the ``mime_type`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -383,7 +466,7 @@ def detect_language(
sent along with the request as metadata.
Returns:
- ~.translation_service.DetectLanguageResponse:
+ google.cloud.translate_v3beta1.types.DetectLanguageResponse:
The response message for language
detection.
@@ -446,52 +529,55 @@ def get_supported_languages(
translation.
Args:
- request (:class:`~.translation_service.GetSupportedLanguagesRequest`):
+ request (google.cloud.translate_v3beta1.types.GetSupportedLanguagesRequest):
The request object. The request message for discovering
supported languages.
- parent (:class:`str`):
+ parent (str):
Required. Project or location to make a call. Must refer
to a caller's project.
- Format: ``projects/{project-id}`` or
- ``projects/{project-id}/locations/{location-id}``.
+ Format: ``projects/{project-number-or-id}`` or
+ ``projects/{project-number-or-id}/locations/{location-id}``.
For global calls, use
- ``projects/{project-id}/locations/global`` or
- ``projects/{project-id}``.
+ ``projects/{project-number-or-id}/locations/global`` or
+ ``projects/{project-number-or-id}``.
Non-global location is required for AutoML models.
Only models within the same region (have same
location-id) can be used, otherwise an INVALID_ARGUMENT
(400) error is returned.
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- display_language_code (:class:`str`):
+ display_language_code (str):
Optional. The language to use to
return localized, human readable names
of supported languages. If missing, then
display names are not returned in a
response.
+
This corresponds to the ``display_language_code`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- model (:class:`str`):
+ model (str):
Optional. Get supported languages of this model.
The format depends on model type:
- AutoML Translation models:
- ``projects/{project-id}/locations/{location-id}/models/{model-id}``
+ ``projects/{project-number-or-id}/locations/{location-id}/models/{model-id}``
- General (built-in) models:
- ``projects/{project-id}/locations/{location-id}/models/general/nmt``,
- ``projects/{project-id}/locations/{location-id}/models/general/base``
+ ``projects/{project-number-or-id}/locations/{location-id}/models/general/nmt``,
+ ``projects/{project-number-or-id}/locations/{location-id}/models/general/base``
Returns languages supported by the specified model. If
missing, we get supported languages of Google general
base (PBMT) model.
+
This corresponds to the ``model`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -503,7 +589,7 @@ def get_supported_languages(
sent along with the request as metadata.
Returns:
- ~.translation_service.SupportedLanguages:
+ google.cloud.translate_v3beta1.types.SupportedLanguages:
The response message for discovering
supported languages.
@@ -551,6 +637,57 @@ def get_supported_languages(
# Done; return the response.
return response
+ def translate_document(
+ self,
+ request: translation_service.TranslateDocumentRequest = None,
+ *,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> translation_service.TranslateDocumentResponse:
+ r"""Translates documents in synchronous mode.
+
+ Args:
+ request (google.cloud.translate_v3beta1.types.TranslateDocumentRequest):
+ The request object. A document translation request.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.translate_v3beta1.types.TranslateDocumentResponse:
+ A translated document response
+ message.
+
+ """
+ # Create or coerce a protobuf request object.
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a translation_service.TranslateDocumentRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, translation_service.TranslateDocumentRequest):
+ request = translation_service.TranslateDocumentRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.translate_document]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
def batch_translate_text(
self,
request: translation_service.BatchTranslateTextRequest = None,
@@ -570,7 +707,7 @@ def batch_translate_text(
of the call.
Args:
- request (:class:`~.translation_service.BatchTranslateTextRequest`):
+ request (google.cloud.translate_v3beta1.types.BatchTranslateTextRequest):
The request object. The batch translation request.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
@@ -580,15 +717,13 @@ def batch_translate_text(
sent along with the request as metadata.
Returns:
- ~.operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
- The result type for the operation will be
- :class:``~.translation_service.BatchTranslateResponse``:
- Stored in the
- [google.longrunning.Operation.response][google.longrunning.Operation.response]
- field returned by BatchTranslateText if at least one
- sentence is translated successfully.
+ The result type for the operation will be :class:`google.cloud.translate_v3beta1.types.BatchTranslateResponse` Stored in the
+ [google.longrunning.Operation.response][google.longrunning.Operation.response]
+ field returned by BatchTranslateText if at least one
+ sentence is translated successfully.
"""
# Create or coerce a protobuf request object.
@@ -624,6 +759,77 @@ def batch_translate_text(
# Done; return the response.
return response
+ def batch_translate_document(
+ self,
+ request: translation_service.BatchTranslateDocumentRequest = None,
+ *,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Translates a large volume of documents in
+ asynchronous batch mode. This function provides real-
+ time output as the inputs are being processed. If caller
+ cancels a request, the partial results (for an input
+ file, it's all or nothing) may still be available on the
+ specified output location.
+ This call returns immediately and you can use
+ google.longrunning.Operation.name to poll the status of
+ the call.
+
+ Args:
+ request (google.cloud.translate_v3beta1.types.BatchTranslateDocumentRequest):
+ The request object. The BatchTranslateDocument request.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be :class:`google.cloud.translate_v3beta1.types.BatchTranslateDocumentResponse` Stored in the
+ [google.longrunning.Operation.response][google.longrunning.Operation.response]
+ field returned by BatchTranslateDocument if at least
+ one document is translated successfully.
+
+ """
+ # Create or coerce a protobuf request object.
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a translation_service.BatchTranslateDocumentRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, translation_service.BatchTranslateDocumentRequest):
+ request = translation_service.BatchTranslateDocumentRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.batch_translate_document]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ translation_service.BatchTranslateDocumentResponse,
+ metadata_type=translation_service.BatchTranslateDocumentMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
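On the synchronous client the same RPC returns a blocking `google.api_core.operation.Operation`. A compact sketch; `batch_request` stands in for the request shape shown in the async sketch earlier:

```python
from google.cloud import translate_v3beta1


def run_batch(batch_request: dict):
    client = translate_v3beta1.TranslationServiceClient()
    operation = client.batch_translate_document(request=batch_request)
    print("Waiting for operation {}...".format(operation.operation.name))
    # Blocks until the LRO finishes; operation.metadata exposes
    # BatchTranslateDocumentMetadata while it is still running.
    return operation.result(timeout=3600)
```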
def create_glossary(
self,
request: translation_service.CreateGlossaryRequest = None,
@@ -638,14 +844,14 @@ def create_glossary(
Returns NOT_FOUND, if the project doesn't exist.
Args:
- request (:class:`~.translation_service.CreateGlossaryRequest`):
+ request (google.cloud.translate_v3beta1.types.CreateGlossaryRequest):
The request object. Request message for CreateGlossary.
- parent (:class:`str`):
+ parent (str):
Required. The project name.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- glossary (:class:`~.translation_service.Glossary`):
+ glossary (google.cloud.translate_v3beta1.types.Glossary):
Required. The glossary to create.
This corresponds to the ``glossary`` field
on the ``request`` instance; if ``request`` is provided, this
@@ -658,12 +864,12 @@ def create_glossary(
sent along with the request as metadata.
Returns:
- ~.operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.translation_service.Glossary``: Represents a
- glossary built from user provided data.
+ :class:`google.cloud.translate_v3beta1.types.Glossary`
+ Represents a glossary built from user provided data.
"""
# Create or coerce a protobuf request object.
@@ -729,21 +935,47 @@ def list_glossaries(
doesn't exist.
Args:
- request (:class:`~.translation_service.ListGlossariesRequest`):
+ request (google.cloud.translate_v3beta1.types.ListGlossariesRequest):
The request object. Request message for ListGlossaries.
- parent (:class:`str`):
+ parent (str):
Required. The name of the project
from which to list all of the
glossaries.
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- filter (:class:`str`):
+ filter (str):
Optional. Filter specifying
- constraints of a list operation.
- Filtering is not supported yet, and the
- parameter currently has no effect. If
- missing, no filtering is performed.
+ constraints of a list operation. Specify
+ the constraint by the format of
+ "key=value", where key must be "src" or
+ "tgt", and the value must be a valid
+ language code. For multiple
+ restrictions, concatenate them by "AND"
+ (uppercase only), such as: "src=en-US
+ AND tgt=zh-CN". Notice that the exact
+ match is used here, which means using
+ 'en-US' and 'en' can lead to different
+ results, which depends on the language
+ code you used when you create the
+ glossary. For the unidirectional
+ glossaries, the "src" and "tgt" add
+ restrictions on the source and target
+ language code separately. For the
+ equivalent term set glossaries, the
+ "src" and/or "tgt" add restrictions on
+ the term set.
+ For example: "src=en-US AND tgt=zh-CN"
+ will only pick the unidirectional
+ glossaries which exactly match the
+ source language code as "en-US" and the
+ target language code "zh-CN", but all
+ equivalent term set glossaries which
+ contain "en-US" and "zh-CN" in their
+ language set will be picked. If missing,
+ no filtering is performed.
+
This corresponds to the ``filter`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -755,7 +987,7 @@ def list_glossaries(
sent along with the request as metadata.
Returns:
- ~.pagers.ListGlossariesPager:
+ google.cloud.translate_v3beta1.services.translation_service.pagers.ListGlossariesPager:
Response message for ListGlossaries.
Iterating over this object will yield
results and resolve additional pages
@@ -822,11 +1054,12 @@ def get_glossary(
exist.
Args:
- request (:class:`~.translation_service.GetGlossaryRequest`):
+ request (google.cloud.translate_v3beta1.types.GetGlossaryRequest):
The request object. Request message for GetGlossary.
- name (:class:`str`):
+ name (str):
Required. The name of the glossary to
retrieve.
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -838,7 +1071,7 @@ def get_glossary(
sent along with the request as metadata.
Returns:
- ~.translation_service.Glossary:
+ google.cloud.translate_v3beta1.types.Glossary:
Represents a glossary built from user
provided data.
@@ -896,11 +1129,12 @@ def delete_glossary(
doesn't exist.
Args:
- request (:class:`~.translation_service.DeleteGlossaryRequest`):
+ request (google.cloud.translate_v3beta1.types.DeleteGlossaryRequest):
The request object. Request message for DeleteGlossary.
- name (:class:`str`):
+ name (str):
Required. The name of the glossary to
delete.
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -912,14 +1146,12 @@ def delete_glossary(
sent along with the request as metadata.
Returns:
- ~.operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
- The result type for the operation will be
- :class:``~.translation_service.DeleteGlossaryResponse``:
- Stored in the
- [google.longrunning.Operation.response][google.longrunning.Operation.response]
- field returned by DeleteGlossary.
+ The result type for the operation will be :class:`google.cloud.translate_v3beta1.types.DeleteGlossaryResponse` Stored in the
+ [google.longrunning.Operation.response][google.longrunning.Operation.response]
+ field returned by DeleteGlossary.
"""
# Create or coerce a protobuf request object.
diff --git a/google/cloud/translate_v3beta1/services/translation_service/pagers.py b/google/cloud/translate_v3beta1/services/translation_service/pagers.py
index a132a74e..a750c43d 100644
--- a/google/cloud/translate_v3beta1/services/translation_service/pagers.py
+++ b/google/cloud/translate_v3beta1/services/translation_service/pagers.py
@@ -15,7 +15,16 @@
# limitations under the License.
#
-from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple
+from typing import (
+ Any,
+ AsyncIterable,
+ Awaitable,
+ Callable,
+ Iterable,
+ Sequence,
+ Tuple,
+ Optional,
+)
from google.cloud.translate_v3beta1.types import translation_service
@@ -24,7 +33,7 @@ class ListGlossariesPager:
"""A pager for iterating through ``list_glossaries`` requests.
This class thinly wraps an initial
- :class:`~.translation_service.ListGlossariesResponse` object, and
+ :class:`google.cloud.translate_v3beta1.types.ListGlossariesResponse` object, and
provides an ``__iter__`` method to iterate through its
``glossaries`` field.
@@ -33,7 +42,7 @@ class ListGlossariesPager:
through the ``glossaries`` field on the
corresponding responses.
- All the usual :class:`~.translation_service.ListGlossariesResponse`
+ All the usual :class:`google.cloud.translate_v3beta1.types.ListGlossariesResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
@@ -51,9 +60,9 @@ def __init__(
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
- request (:class:`~.translation_service.ListGlossariesRequest`):
+ request (google.cloud.translate_v3beta1.types.ListGlossariesRequest):
The initial request object.
- response (:class:`~.translation_service.ListGlossariesResponse`):
+ response (google.cloud.translate_v3beta1.types.ListGlossariesResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
@@ -86,7 +95,7 @@ class ListGlossariesAsyncPager:
"""A pager for iterating through ``list_glossaries`` requests.
This class thinly wraps an initial
- :class:`~.translation_service.ListGlossariesResponse` object, and
+ :class:`google.cloud.translate_v3beta1.types.ListGlossariesResponse` object, and
provides an ``__aiter__`` method to iterate through its
``glossaries`` field.
@@ -95,7 +104,7 @@ class ListGlossariesAsyncPager:
through the ``glossaries`` field on the
corresponding responses.
- All the usual :class:`~.translation_service.ListGlossariesResponse`
+ All the usual :class:`google.cloud.translate_v3beta1.types.ListGlossariesResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
@@ -113,9 +122,9 @@ def __init__(
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
- request (:class:`~.translation_service.ListGlossariesRequest`):
+ request (google.cloud.translate_v3beta1.types.ListGlossariesRequest):
The initial request object.
- response (:class:`~.translation_service.ListGlossariesResponse`):
+ response (google.cloud.translate_v3beta1.types.ListGlossariesResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
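The async pager mirrors its synchronous counterpart: the async `list_glossaries` coroutine resolves to a `ListGlossariesAsyncPager`, and `async for` fetches further pages transparently. A short sketch (the parent is a placeholder):

```python
from google.cloud import translate_v3beta1


async def print_glossaries():
    client = translate_v3beta1.TranslationServiceAsyncClient()
    pager = await client.list_glossaries(
        parent="projects/my-project/locations/us-central1"
    )
    async for glossary in pager:  # additional pages are fetched lazily
        print(glossary.name)
```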
diff --git a/google/cloud/translate_v3beta1/services/translation_service/transports/__init__.py b/google/cloud/translate_v3beta1/services/translation_service/transports/__init__.py
index b977312d..92859637 100644
--- a/google/cloud/translate_v3beta1/services/translation_service/transports/__init__.py
+++ b/google/cloud/translate_v3beta1/services/translation_service/transports/__init__.py
@@ -30,7 +30,6 @@
_transport_registry["grpc"] = TranslationServiceGrpcTransport
_transport_registry["grpc_asyncio"] = TranslationServiceGrpcAsyncIOTransport
-
__all__ = (
"TranslationServiceTransport",
"TranslationServiceGrpcTransport",
diff --git a/google/cloud/translate_v3beta1/services/translation_service/transports/base.py b/google/cloud/translate_v3beta1/services/translation_service/transports/base.py
index 7c9f6810..c8a24ade 100644
--- a/google/cloud/translate_v3beta1/services/translation_service/transports/base.py
+++ b/google/cloud/translate_v3beta1/services/translation_service/transports/base.py
@@ -72,10 +72,10 @@ def __init__(
scope (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
your own client library.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
@@ -83,6 +83,9 @@ def __init__(
host += ":443"
self._host = host
+ # Save the scopes.
+ self._scopes = scopes or self.AUTH_SCOPES
+
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials and credentials_file:
@@ -92,20 +95,17 @@ def __init__(
if credentials_file is not None:
credentials, _ = auth.load_credentials_from_file(
- credentials_file, scopes=scopes, quota_project_id=quota_project_id
+ credentials_file, scopes=self._scopes, quota_project_id=quota_project_id
)
elif credentials is None:
credentials, _ = auth.default(
- scopes=scopes, quota_project_id=quota_project_id
+ scopes=self._scopes, quota_project_id=quota_project_id
)
# Save the credentials.
self._credentials = credentials
- # Lifted into its own function so it can be stubbed out during tests.
- self._prep_wrapped_messages(client_info)
-
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
@@ -122,17 +122,26 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=client_info,
),
+ self.translate_document: gapic_v1.method.wrap_method(
+ self.translate_document, default_timeout=600.0, client_info=client_info,
+ ),
self.batch_translate_text: gapic_v1.method.wrap_method(
self.batch_translate_text,
default_timeout=600.0,
client_info=client_info,
),
+ self.batch_translate_document: gapic_v1.method.wrap_method(
+ self.batch_translate_document,
+ default_timeout=600.0,
+ client_info=client_info,
+ ),
self.create_glossary: gapic_v1.method.wrap_method(
self.create_glossary, default_timeout=600.0, client_info=client_info,
),
@@ -143,8 +152,9 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=client_info,
@@ -156,8 +166,9 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=client_info,
@@ -169,8 +180,9 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
+ deadline=600.0,
),
default_timeout=600.0,
client_info=client_info,
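The retry policies above are wrapped-method defaults; an individual call can still override them. A sketch that mirrors the generated settings but with a tighter overall deadline (the glossary name is a placeholder):

```python
from google.api_core import exceptions
from google.api_core import retry as retries
from google.cloud import translate_v3beta1

client = translate_v3beta1.TranslationServiceClient()

custom_retry = retries.Retry(
    initial=0.1,
    maximum=60.0,
    multiplier=1.3,
    predicate=retries.if_exception_type(
        exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
    ),
    deadline=120.0,  # give up after two minutes instead of ten
)

glossary = client.get_glossary(
    name="projects/my-project/locations/us-central1/glossaries/my-glossary",
    retry=custom_retry,
    timeout=120.0,
)
```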
@@ -218,6 +230,18 @@ def get_supported_languages(
]:
raise NotImplementedError()
+ @property
+ def translate_document(
+ self,
+ ) -> typing.Callable[
+ [translation_service.TranslateDocumentRequest],
+ typing.Union[
+ translation_service.TranslateDocumentResponse,
+ typing.Awaitable[translation_service.TranslateDocumentResponse],
+ ],
+ ]:
+ raise NotImplementedError()
+
@property
def batch_translate_text(
self,
@@ -227,6 +251,15 @@ def batch_translate_text(
]:
raise NotImplementedError()
+ @property
+ def batch_translate_document(
+ self,
+ ) -> typing.Callable[
+ [translation_service.BatchTranslateDocumentRequest],
+ typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ ]:
+ raise NotImplementedError()
+
@property
def create_glossary(
self,
diff --git a/google/cloud/translate_v3beta1/services/translation_service/transports/grpc.py b/google/cloud/translate_v3beta1/services/translation_service/transports/grpc.py
index 88882482..9a903357 100644
--- a/google/cloud/translate_v3beta1/services/translation_service/transports/grpc.py
+++ b/google/cloud/translate_v3beta1/services/translation_service/transports/grpc.py
@@ -59,6 +59,7 @@ def __init__(
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -89,12 +90,16 @@ def __init__(
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
your own client library.
Raises:
@@ -103,79 +108,71 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._grpc_channel = None
+ self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+ self._operations_client = None
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
if channel:
- # Sanity check: Ensure that channel and credentials are not both
- # provided.
+ # Ignore credentials if a channel was passed.
credentials = False
-
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
- elif api_mtls_endpoint:
- warnings.warn(
- "api_mtls_endpoint and client_cert_source are deprecated",
- DeprecationWarning,
- )
-
- host = (
- api_mtls_endpoint
- if ":" in api_mtls_endpoint
- else api_mtls_endpoint + ":443"
- )
-
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
-
- # Create SSL credentials with client_cert_source or application
- # default SSL credentials.
- if client_cert_source:
- cert, key = client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
- else:
- ssl_credentials = SslCredentials().ssl_credentials
+ self._ssl_channel_credentials = None
- # create a new channel. The provided one is ignored.
- self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- ssl_credentials=ssl_credentials,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- )
else:
- host = host if ":" in host else host + ":443"
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
-
- # create a new channel. The provided one is ignored.
- self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- ssl_credentials=ssl_channel_credentials,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- )
-
- self._stubs = {} # type: Dict[str, Callable]
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
- # Run the base constructor.
+ # The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
- scopes=scopes or self.AUTH_SCOPES,
+ scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
)
+ if not self._grpc_channel:
+ self._grpc_channel = type(self).create_channel(
+ self._host,
+ credentials=self._credentials,
+ credentials_file=credentials_file,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
+
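Since the transport now uses a caller-supplied channel verbatim (and ignores credentials in that case), pointing the client at a local emulator or proxy stays straightforward; the endpoint below is hypothetical:

```python
import grpc

from google.cloud import translate_v3beta1
from google.cloud.translate_v3beta1.services.translation_service.transports import (
    TranslationServiceGrpcTransport,
)

# A pre-built channel is used as-is; no ADC lookup is attempted.
channel = grpc.insecure_channel("localhost:8080")
transport = TranslationServiceGrpcTransport(channel=channel)
client = translate_v3beta1.TranslationServiceClient(transport=transport)
```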
@classmethod
def create_channel(
cls,
@@ -188,7 +185,7 @@ def create_channel(
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
- address (Optionsl[str]): The host for the channel to use.
+ host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
@@ -223,12 +220,8 @@ def create_channel(
@property
def grpc_channel(self) -> grpc.Channel:
- """Create the channel designed to connect to this service.
-
- This property caches on the instance; repeated calls return
- the same channel.
+ """Return the channel designed to connect to this service.
"""
- # Return the channel from cache.
return self._grpc_channel
@property
@@ -239,13 +232,11 @@ def operations_client(self) -> operations_v1.OperationsClient:
client.
"""
# Sanity check: Only create a new client if we do not already have one.
- if "operations_client" not in self.__dict__:
- self.__dict__["operations_client"] = operations_v1.OperationsClient(
- self.grpc_channel
- )
+ if self._operations_client is None:
+ self._operations_client = operations_v1.OperationsClient(self.grpc_channel)
# Return the client from cache.
- return self.__dict__["operations_client"]
+ return self._operations_client
@property
def translate_text(
@@ -335,6 +326,35 @@ def get_supported_languages(
)
return self._stubs["get_supported_languages"]
+ @property
+ def translate_document(
+ self,
+ ) -> Callable[
+ [translation_service.TranslateDocumentRequest],
+ translation_service.TranslateDocumentResponse,
+ ]:
+ r"""Return a callable for the translate document method over gRPC.
+
+ Translates documents in synchronous mode.
+
+ Returns:
+ Callable[[~.TranslateDocumentRequest],
+ ~.TranslateDocumentResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "translate_document" not in self._stubs:
+ self._stubs["translate_document"] = self.grpc_channel.unary_unary(
+ "/google.cloud.translation.v3beta1.TranslationService/TranslateDocument",
+ request_serializer=translation_service.TranslateDocumentRequest.serialize,
+ response_deserializer=translation_service.TranslateDocumentResponse.deserialize,
+ )
+ return self._stubs["translate_document"]
+
@property
def batch_translate_text(
self,
@@ -371,6 +391,42 @@ def batch_translate_text(
)
return self._stubs["batch_translate_text"]
+ @property
+ def batch_translate_document(
+ self,
+ ) -> Callable[
+ [translation_service.BatchTranslateDocumentRequest], operations.Operation
+ ]:
+ r"""Return a callable for the batch translate document method over gRPC.
+
+ Translates a large volume of documents in
+ asynchronous batch mode. This function provides real-
+ time output as the inputs are being processed. If caller
+ cancels a request, the partial results (for an input
+ file, it's all or nothing) may still be available on the
+ specified output location.
+ This call returns immediately and you can use
+ google.longrunning.Operation.name to poll the status of
+ the call.
+
+ Returns:
+ Callable[[~.BatchTranslateDocumentRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "batch_translate_document" not in self._stubs:
+ self._stubs["batch_translate_document"] = self.grpc_channel.unary_unary(
+ "/google.cloud.translation.v3beta1.TranslationService/BatchTranslateDocument",
+ request_serializer=translation_service.BatchTranslateDocumentRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["batch_translate_document"]
+
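Unlike TranslateDocument, this RPC returns a google.longrunning operation; a hedged sketch of how a caller would typically block on it (the timeout value is arbitrary):

    from google.cloud import translate_v3beta1 as translate


    def run_batch_document_translation(
        client: translate.TranslationServiceClient,
        request: translate.BatchTranslateDocumentRequest,
    ):
        # batch_translate_document returns a long-running operation wrapper.
        operation = client.batch_translate_document(request=request)
        # Block until the service marks the operation done, then return the
        # BatchTranslateDocumentResponse stored in its response field.
        return operation.result(timeout=600)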
@property
def create_glossary(
self,
diff --git a/google/cloud/translate_v3beta1/services/translation_service/transports/grpc_asyncio.py b/google/cloud/translate_v3beta1/services/translation_service/transports/grpc_asyncio.py
index 883c06a6..72617ed7 100644
--- a/google/cloud/translate_v3beta1/services/translation_service/transports/grpc_asyncio.py
+++ b/google/cloud/translate_v3beta1/services/translation_service/transports/grpc_asyncio.py
@@ -63,7 +63,7 @@ def create_channel(
) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
- address (Optional[str]): The host for the channel to use.
+ host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
@@ -103,6 +103,7 @@ def __init__(
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -134,12 +135,16 @@ def __init__(
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
your own client library.
Raises:
@@ -148,78 +153,70 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._grpc_channel = None
+ self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+ self._operations_client = None
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
if channel:
- # Sanity check: Ensure that channel and credentials are not both
- # provided.
+ # Ignore credentials if a channel was passed.
credentials = False
-
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
- elif api_mtls_endpoint:
- warnings.warn(
- "api_mtls_endpoint and client_cert_source are deprecated",
- DeprecationWarning,
- )
-
- host = (
- api_mtls_endpoint
- if ":" in api_mtls_endpoint
- else api_mtls_endpoint + ":443"
- )
-
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
-
- # Create SSL credentials with client_cert_source or application
- # default SSL credentials.
- if client_cert_source:
- cert, key = client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
- else:
- ssl_credentials = SslCredentials().ssl_credentials
+ self._ssl_channel_credentials = None
- # create a new channel. The provided one is ignored.
- self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- ssl_credentials=ssl_credentials,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- )
else:
- host = host if ":" in host else host + ":443"
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
-
- # create a new channel. The provided one is ignored.
- self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- ssl_credentials=ssl_channel_credentials,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- )
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
- # Run the base constructor.
+ # The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
- scopes=scopes or self.AUTH_SCOPES,
+ scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
)
- self._stubs = {}
+ if not self._grpc_channel:
+ self._grpc_channel = type(self).create_channel(
+ self._host,
+ credentials=self._credentials,
+ credentials_file=credentials_file,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
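A hedged sketch of passing the new client_cert_source_for_mtls hook straight to this transport; the class name and mTLS endpoint are assumptions based on the module path, and most applications would configure mTLS through client options instead:

    from google.cloud.translate_v3beta1.services.translation_service.transports.grpc_asyncio import (
        TranslationServiceGrpcAsyncIOTransport,  # assumed generated class name
    )


    def my_cert_source():
        """Hypothetical callback returning (cert_bytes, key_bytes), both PEM-encoded."""
        with open("client-cert.pem", "rb") as cert, open("client-key.pem", "rb") as key:
            return cert.read(), key.read()


    transport = TranslationServiceGrpcAsyncIOTransport(
        host="translate.mtls.googleapis.com",  # assumed mTLS endpoint
        client_cert_source_for_mtls=my_cert_source,
    )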
@property
def grpc_channel(self) -> aio.Channel:
@@ -239,13 +236,13 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient:
client.
"""
# Sanity check: Only create a new client if we do not already have one.
- if "operations_client" not in self.__dict__:
- self.__dict__["operations_client"] = operations_v1.OperationsAsyncClient(
+ if self._operations_client is None:
+ self._operations_client = operations_v1.OperationsAsyncClient(
self.grpc_channel
)
# Return the client from cache.
- return self.__dict__["operations_client"]
+ return self._operations_client
@property
def translate_text(
@@ -335,6 +332,35 @@ def get_supported_languages(
)
return self._stubs["get_supported_languages"]
+ @property
+ def translate_document(
+ self,
+ ) -> Callable[
+ [translation_service.TranslateDocumentRequest],
+ Awaitable[translation_service.TranslateDocumentResponse],
+ ]:
+ r"""Return a callable for the translate document method over gRPC.
+
+ Translates documents in synchronous mode.
+
+ Returns:
+ Callable[[~.TranslateDocumentRequest],
+ Awaitable[~.TranslateDocumentResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "translate_document" not in self._stubs:
+ self._stubs["translate_document"] = self.grpc_channel.unary_unary(
+ "/google.cloud.translation.v3beta1.TranslationService/TranslateDocument",
+ request_serializer=translation_service.TranslateDocumentRequest.serialize,
+ response_deserializer=translation_service.TranslateDocumentResponse.deserialize,
+ )
+ return self._stubs["translate_document"]
+
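The asyncio stub above is surfaced through the generated async client; a minimal sketch, assuming it is exported as TranslationServiceAsyncClient and using placeholder project and file names:

    import asyncio

    from google.cloud import translate_v3beta1 as translate


    async def translate_pdf(path: str) -> bytes:
        client = translate.TranslationServiceAsyncClient()
        with open(path, "rb") as f:
            content = f.read()
        response = await client.translate_document(
            request={
                "parent": "projects/YOUR_PROJECT_ID/locations/us-central1",
                "target_language_code": "es",
                "document_input_config": {
                    "content": content,
                    "mime_type": "application/pdf",
                },
            }
        )
        # With no document_output_config, the translation comes back inline.
        return response.document_translation.byte_stream_outputs[0]


    # asyncio.run(translate_pdf("contract.pdf"))  # placeholder path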
@property
def batch_translate_text(
self,
@@ -371,6 +397,43 @@ def batch_translate_text(
)
return self._stubs["batch_translate_text"]
+ @property
+ def batch_translate_document(
+ self,
+ ) -> Callable[
+ [translation_service.BatchTranslateDocumentRequest],
+ Awaitable[operations.Operation],
+ ]:
+ r"""Return a callable for the batch translate document method over gRPC.
+
+ Translates a large volume of documents in
+ asynchronous batch mode. This function provides real-
+ time output as the inputs are being processed. If caller
+ cancels a request, the partial results (for an input
+ file, it's all or nothing) may still be available on the
+ specified output location.
+ This call returns immediately and you can use
+ google.longrunning.Operation.name to poll the status of
+ the call.
+
+ Returns:
+ Callable[[~.BatchTranslateDocumentRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "batch_translate_document" not in self._stubs:
+ self._stubs["batch_translate_document"] = self.grpc_channel.unary_unary(
+ "/google.cloud.translation.v3beta1.TranslationService/BatchTranslateDocument",
+ request_serializer=translation_service.BatchTranslateDocumentRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["batch_translate_document"]
+
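On the async surface the same RPC yields an awaitable operation wrapper; a short hedged sketch, assuming google.api_core's AsyncOperation semantics:

    from google.cloud import translate_v3beta1 as translate


    async def run_batch(request: translate.BatchTranslateDocumentRequest):
        client = translate.TranslationServiceAsyncClient()
        operation = await client.batch_translate_document(request=request)
        # AsyncOperation.result() is itself a coroutine.
        response = await operation.result(timeout=600)
        return response.translated_pages, response.failed_pages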
@property
def create_glossary(
self,
diff --git a/google/cloud/translate_v3beta1/types/__init__.py b/google/cloud/translate_v3beta1/types/__init__.py
index 60c0f41d..64ad8390 100644
--- a/google/cloud/translate_v3beta1/types/__init__.py
+++ b/google/cloud/translate_v3beta1/types/__init__.py
@@ -16,62 +16,81 @@
#
from .translation_service import (
- TranslateTextGlossaryConfig,
- TranslateTextRequest,
- TranslateTextResponse,
- Translation,
- DetectLanguageRequest,
- DetectedLanguage,
- DetectLanguageResponse,
- GetSupportedLanguagesRequest,
- SupportedLanguages,
- SupportedLanguage,
- GcsSource,
- InputConfig,
- GcsDestination,
- OutputConfig,
- BatchTranslateTextRequest,
+ BatchDocumentInputConfig,
+ BatchDocumentOutputConfig,
+ BatchTranslateDocumentMetadata,
+ BatchTranslateDocumentRequest,
+ BatchTranslateDocumentResponse,
BatchTranslateMetadata,
BatchTranslateResponse,
- GlossaryInputConfig,
- Glossary,
+ BatchTranslateTextRequest,
+ CreateGlossaryMetadata,
CreateGlossaryRequest,
- GetGlossaryRequest,
+ DeleteGlossaryMetadata,
DeleteGlossaryRequest,
+ DeleteGlossaryResponse,
+ DetectedLanguage,
+ DetectLanguageRequest,
+ DetectLanguageResponse,
+ DocumentInputConfig,
+ DocumentOutputConfig,
+ DocumentTranslation,
+ GcsDestination,
+ GcsSource,
+ GetGlossaryRequest,
+ GetSupportedLanguagesRequest,
+ Glossary,
+ GlossaryInputConfig,
+ InputConfig,
ListGlossariesRequest,
ListGlossariesResponse,
- CreateGlossaryMetadata,
- DeleteGlossaryMetadata,
- DeleteGlossaryResponse,
+ OutputConfig,
+ SupportedLanguage,
+ SupportedLanguages,
+ TranslateDocumentRequest,
+ TranslateDocumentResponse,
+ TranslateTextGlossaryConfig,
+ TranslateTextRequest,
+ TranslateTextResponse,
+ Translation,
)
-
__all__ = (
- "TranslateTextGlossaryConfig",
- "TranslateTextRequest",
- "TranslateTextResponse",
- "Translation",
- "DetectLanguageRequest",
- "DetectedLanguage",
- "DetectLanguageResponse",
- "GetSupportedLanguagesRequest",
- "SupportedLanguages",
- "SupportedLanguage",
- "GcsSource",
- "InputConfig",
- "GcsDestination",
- "OutputConfig",
- "BatchTranslateTextRequest",
+ "BatchDocumentInputConfig",
+ "BatchDocumentOutputConfig",
+ "BatchTranslateDocumentMetadata",
+ "BatchTranslateDocumentRequest",
+ "BatchTranslateDocumentResponse",
"BatchTranslateMetadata",
"BatchTranslateResponse",
- "GlossaryInputConfig",
- "Glossary",
+ "BatchTranslateTextRequest",
+ "CreateGlossaryMetadata",
"CreateGlossaryRequest",
- "GetGlossaryRequest",
+ "DeleteGlossaryMetadata",
"DeleteGlossaryRequest",
+ "DeleteGlossaryResponse",
+ "DetectedLanguage",
+ "DetectLanguageRequest",
+ "DetectLanguageResponse",
+ "DocumentInputConfig",
+ "DocumentOutputConfig",
+ "DocumentTranslation",
+ "GcsDestination",
+ "GcsSource",
+ "GetGlossaryRequest",
+ "GetSupportedLanguagesRequest",
+ "Glossary",
+ "GlossaryInputConfig",
+ "InputConfig",
"ListGlossariesRequest",
"ListGlossariesResponse",
- "CreateGlossaryMetadata",
- "DeleteGlossaryMetadata",
- "DeleteGlossaryResponse",
+ "OutputConfig",
+ "SupportedLanguage",
+ "SupportedLanguages",
+ "TranslateDocumentRequest",
+ "TranslateDocumentResponse",
+ "TranslateTextGlossaryConfig",
+ "TranslateTextRequest",
+ "TranslateTextResponse",
+ "Translation",
)
diff --git a/google/cloud/translate_v3beta1/types/translation_service.py b/google/cloud/translate_v3beta1/types/translation_service.py
index 5ee91b8f..3398dc13 100644
--- a/google/cloud/translate_v3beta1/types/translation_service.py
+++ b/google/cloud/translate_v3beta1/types/translation_service.py
@@ -38,6 +38,11 @@
"InputConfig",
"GcsDestination",
"OutputConfig",
+ "DocumentInputConfig",
+ "DocumentOutputConfig",
+ "TranslateDocumentRequest",
+ "DocumentTranslation",
+ "TranslateDocumentResponse",
"BatchTranslateTextRequest",
"BatchTranslateMetadata",
"BatchTranslateResponse",
@@ -51,6 +56,11 @@
"CreateGlossaryMetadata",
"DeleteGlossaryMetadata",
"DeleteGlossaryResponse",
+ "BatchTranslateDocumentRequest",
+ "BatchDocumentInputConfig",
+ "BatchDocumentOutputConfig",
+ "BatchTranslateDocumentResponse",
+ "BatchTranslateDocumentMetadata",
},
)
@@ -80,8 +90,9 @@ class TranslateTextRequest(proto.Message):
contents (Sequence[str]):
Required. The content of the input in string
format. We recommend the total content be less
- than 30k codepoints. Use BatchTranslateText for
- larger text.
+ than 30k codepoints. The max length of this
+ field is 1024.
+ Use BatchTranslateText for larger text.
mime_type (str):
Optional. The format of the source text, for
example, "text/html", "text/plain". If left
@@ -102,12 +113,12 @@ class TranslateTextRequest(proto.Message):
Required. Project or location to make a call. Must refer to
a caller's project.
- Format: ``projects/{project-id}`` or
- ``projects/{project-id}/locations/{location-id}``.
+ Format: ``projects/{project-number-or-id}`` or
+ ``projects/{project-number-or-id}/locations/{location-id}``.
For global calls, use
- ``projects/{project-id}/locations/global`` or
- ``projects/{project-id}``.
+ ``projects/{project-number-or-id}/locations/global`` or
+ ``projects/{project-number-or-id}``.
Non-global location is required for requests using AutoML
models or custom glossaries.
@@ -121,24 +132,24 @@ class TranslateTextRequest(proto.Message):
The format depends on model type:
- AutoML Translation models:
- ``projects/{project-id}/locations/{location-id}/models/{model-id}``
+ ``projects/{project-number-or-id}/locations/{location-id}/models/{model-id}``
- General (built-in) models:
- ``projects/{project-id}/locations/{location-id}/models/general/nmt``,
- ``projects/{project-id}/locations/{location-id}/models/general/base``
+ ``projects/{project-number-or-id}/locations/{location-id}/models/general/nmt``,
+ ``projects/{project-number-or-id}/locations/{location-id}/models/general/base``
For global (non-regionalized) requests, use ``location-id``
``global``. For example,
- ``projects/{project-id}/locations/global/models/general/nmt``.
+ ``projects/{project-number-or-id}/locations/global/models/general/nmt``.
If missing, the system decides which google base model to
use.
- glossary_config (~.translation_service.TranslateTextGlossaryConfig):
+ glossary_config (google.cloud.translate_v3beta1.types.TranslateTextGlossaryConfig):
Optional. Glossary to be applied. The glossary must be
within the same region (have the same location-id) as the
model, otherwise an INVALID_ARGUMENT (400) error is
returned.
- labels (Sequence[~.translation_service.TranslateTextRequest.LabelsEntry]):
+ labels (Sequence[google.cloud.translate_v3beta1.types.TranslateTextRequest.LabelsEntry]):
Optional. The labels with user-defined
metadata for the request.
Label keys and values can be no longer than 63
@@ -165,7 +176,7 @@ class TranslateTextRequest(proto.Message):
model = proto.Field(proto.STRING, number=6)
glossary_config = proto.Field(
- proto.MESSAGE, number=7, message=TranslateTextGlossaryConfig,
+ proto.MESSAGE, number=7, message="TranslateTextGlossaryConfig",
)
labels = proto.MapField(proto.STRING, proto.STRING, number=10)
@@ -175,11 +186,11 @@ class TranslateTextResponse(proto.Message):
r"""
Attributes:
- translations (Sequence[~.translation_service.Translation]):
+ translations (Sequence[google.cloud.translate_v3beta1.types.Translation]):
Text translation responses with no glossary applied. This
field has the same length as
[``contents``][google.cloud.translation.v3beta1.TranslateTextRequest.contents].
- glossary_translations (Sequence[~.translation_service.Translation]):
+ glossary_translations (Sequence[google.cloud.translate_v3beta1.types.Translation]):
Text translation responses if a glossary is provided in the
request. This can be the same as
[``translations``][google.cloud.translation.v3beta1.TranslateTextResponse.translations]
@@ -201,8 +212,14 @@ class Translation(proto.Message):
translated_text (str):
Text translated into the target language.
model (str):
- Only present when ``model`` is present in the request. This
- is same as ``model`` provided in the request.
+ Only present when ``model`` is present in the request.
+ ``model`` here is normalized to have project number.
+
+ For example: If the ``model`` requested in
+ TranslateTextRequest is
+ ``projects/{project-id}/locations/{location-id}/models/general/nmt``
+ then ``model`` here would be normalized to
+ ``projects/{project-number}/locations/{location-id}/models/general/nmt``.
detected_language_code (str):
The BCP-47 language code of source text in
the initial request, detected automatically, if
@@ -210,7 +227,7 @@ class Translation(proto.Message):
request. If the source language was passed,
auto-detection of the language does not occur
and this field is empty.
- glossary_config (~.translation_service.TranslateTextGlossaryConfig):
+ glossary_config (google.cloud.translate_v3beta1.types.TranslateTextGlossaryConfig):
The ``glossary_config`` used for this translation.
"""
@@ -221,7 +238,7 @@ class Translation(proto.Message):
detected_language_code = proto.Field(proto.STRING, number=4)
glossary_config = proto.Field(
- proto.MESSAGE, number=3, message=TranslateTextGlossaryConfig,
+ proto.MESSAGE, number=3, message="TranslateTextGlossaryConfig",
)
@@ -233,24 +250,25 @@ class DetectLanguageRequest(proto.Message):
Required. Project or location to make a call. Must refer to
a caller's project.
- Format: ``projects/{project-id}/locations/{location-id}`` or
- ``projects/{project-id}``.
+ Format:
+ ``projects/{project-number-or-id}/locations/{location-id}``
+ or ``projects/{project-number-or-id}``.
For global calls, use
- ``projects/{project-id}/locations/global`` or
- ``projects/{project-id}``.
+ ``projects/{project-number-or-id}/locations/global`` or
+ ``projects/{project-number-or-id}``.
- Only models within the same region (has same location-id)
- can be used. Otherwise an INVALID_ARGUMENT (400) error is
- returned.
+ Only models within the same region, which have the same
+ location-id, can be used. Otherwise an INVALID_ARGUMENT
+ (400) error is returned.
model (str):
Optional. The language detection model to be used.
Format:
- ``projects/{project-id}/locations/{location-id}/models/language-detection/{model-id}``
+ ``projects/{project-number-or-id}/locations/{location-id}/models/language-detection/{model-id}``
Only one language detection model is currently supported:
- ``projects/{project-id}/locations/{location-id}/models/language-detection/default``.
+ ``projects/{project-number-or-id}/locations/{location-id}/models/language-detection/default``.
If not specified, the default model is used.
content (str):
@@ -259,7 +277,7 @@ class DetectLanguageRequest(proto.Message):
Optional. The format of the source text, for
example, "text/html", "text/plain". If left
blank, the MIME type defaults to "text/html".
- labels (Sequence[~.translation_service.DetectLanguageRequest.LabelsEntry]):
+ labels (Sequence[google.cloud.translate_v3beta1.types.DetectLanguageRequest.LabelsEntry]):
Optional. The labels with user-defined
metadata for the request.
Label keys and values can be no longer than 63
@@ -305,13 +323,15 @@ class DetectLanguageResponse(proto.Message):
r"""The response message for language detection.
Attributes:
- languages (Sequence[~.translation_service.DetectedLanguage]):
+ languages (Sequence[google.cloud.translate_v3beta1.types.DetectedLanguage]):
A list of detected languages sorted by
detection confidence in descending order. The
most probable language first.
"""
- languages = proto.RepeatedField(proto.MESSAGE, number=1, message=DetectedLanguage,)
+ languages = proto.RepeatedField(
+ proto.MESSAGE, number=1, message="DetectedLanguage",
+ )
class GetSupportedLanguagesRequest(proto.Message):
@@ -322,12 +342,12 @@ class GetSupportedLanguagesRequest(proto.Message):
Required. Project or location to make a call. Must refer to
a caller's project.
- Format: ``projects/{project-id}`` or
- ``projects/{project-id}/locations/{location-id}``.
+ Format: ``projects/{project-number-or-id}`` or
+ ``projects/{project-number-or-id}/locations/{location-id}``.
For global calls, use
- ``projects/{project-id}/locations/global`` or
- ``projects/{project-id}``.
+ ``projects/{project-number-or-id}/locations/global`` or
+ ``projects/{project-number-or-id}``.
Non-global location is required for AutoML models.
@@ -345,11 +365,11 @@ class GetSupportedLanguagesRequest(proto.Message):
The format depends on model type:
- AutoML Translation models:
- ``projects/{project-id}/locations/{location-id}/models/{model-id}``
+ ``projects/{project-number-or-id}/locations/{location-id}/models/{model-id}``
- General (built-in) models:
- ``projects/{project-id}/locations/{location-id}/models/general/nmt``,
- ``projects/{project-id}/locations/{location-id}/models/general/base``
+ ``projects/{project-number-or-id}/locations/{location-id}/models/general/nmt``,
+ ``projects/{project-number-or-id}/locations/{location-id}/models/general/base``
Returns languages supported by the specified model. If
missing, we get supported languages of Google general base
@@ -367,7 +387,7 @@ class SupportedLanguages(proto.Message):
r"""The response message for discovering supported languages.
Attributes:
- languages (Sequence[~.translation_service.SupportedLanguage]):
+ languages (Sequence[google.cloud.translate_v3beta1.types.SupportedLanguage]):
A list of supported language responses. This
list contains an entry for each language the
Translation API supports.
@@ -429,7 +449,7 @@ class InputConfig(proto.Message):
"text/html" is used if mime_type is missing. For ``.html``,
this field must be "text/html" or empty. For ``.txt``, this
field must be "text/plain" or empty.
- gcs_source (~.translation_service.GcsSource):
+ gcs_source (google.cloud.translate_v3beta1.types.GcsSource):
Required. Google Cloud Storage location for the source
input. This can be a single file (for example,
``gs://translation-test/input.tsv``) or a wildcard (for
@@ -454,7 +474,7 @@ class InputConfig(proto.Message):
mime_type = proto.Field(proto.STRING, number=1)
gcs_source = proto.Field(
- proto.MESSAGE, number=2, oneof="source", message=GcsSource,
+ proto.MESSAGE, number=2, oneof="source", message="GcsSource",
)
@@ -476,7 +496,7 @@ class OutputConfig(proto.Message):
r"""Output configuration for BatchTranslateText request.
Attributes:
- gcs_destination (~.translation_service.GcsDestination):
+ gcs_destination (google.cloud.translate_v3beta1.types.GcsDestination):
Google Cloud Storage destination for output content. For
every single input file (for example,
gs://a/b/c.[extension]), we generate at most 2 \* n output
@@ -508,13 +528,18 @@ class OutputConfig(proto.Message):
content to output.
Once a row is present in index.csv, the input/output
- matching never changes. Callers should also expect all the
- content in input_file are processed and ready to be consumed
- (that is, no partial output file is written).
+ matching never changes. Callers should also expect the
+ contents in the input_file are processed and ready to be
+ consumed (that is, no partial output file is written).
+
+ Since index.csv will be updated during the process, please
+ make sure there is no custom retention policy applied on the
+ output bucket that may prevent file updating.
+ (https://cloud.google.com/storage/docs/bucket-lock?hl=en#retention-policy)
The format of translations_file (for target language code
'trg') is:
- ``gs://translation_test/a_b_c\_'trg'_translations.[extension]``
+ gs://translation_test/a_b_c\_'trg'_translations.[extension]
If the input file extension is tsv, the output has the
following columns: Column 1: ID of the request provided in
@@ -531,10 +556,10 @@ class OutputConfig(proto.Message):
directly written to the output file. If glossary is
requested, a separate glossary_translations_file has format
of
- ``gs://translation_test/a_b_c\_'trg'_glossary_translations.[extension]``
+ gs://translation_test/a_b_c\_'trg'_glossary_translations.[extension]
The format of errors file (for target language code 'trg')
- is: ``gs://translation_test/a_b_c\_'trg'_errors.[extension]``
+ is: gs://translation_test/a_b_c\_'trg'_errors.[extension]
If the input file extension is tsv, errors_file contains the
following: Column 1: ID of the request provided in the
@@ -547,11 +572,284 @@ class OutputConfig(proto.Message):
If the input file extension is txt or html,
glossary_error_file will be generated that contains error
details. glossary_error_file has format of
- ``gs://translation_test/a_b_c\_'trg'_glossary_errors.[extension]``
+ gs://translation_test/a_b_c\_'trg'_glossary_errors.[extension]
"""
gcs_destination = proto.Field(
- proto.MESSAGE, number=1, oneof="destination", message=GcsDestination,
+ proto.MESSAGE, number=1, oneof="destination", message="GcsDestination",
+ )
+
+
+class DocumentInputConfig(proto.Message):
+ r"""A document translation request input config.
+
+ Attributes:
+ content (bytes):
+ Document's content represented as a stream of
+ bytes.
+ gcs_source (google.cloud.translate_v3beta1.types.GcsSource):
+ Google Cloud Storage location. This must be a single file.
+ For example: gs://example_bucket/example_file.pdf
+ mime_type (str):
+ Specifies the input document's mime_type.
+
+ If not specified, it will be determined from the file
+ extension for files provided via gcs_source. For a file
+ provided as bytes content, the mime_type must be provided.
+ Currently supported mime types are:
+
+ - application/pdf
+ - application/vnd.openxmlformats-officedocument.wordprocessingml.document
+ - application/vnd.openxmlformats-officedocument.presentationml.presentation
+ - application/vnd.openxmlformats-officedocument.spreadsheetml.sheet
+ """
+
+ content = proto.Field(proto.BYTES, number=1, oneof="source")
+
+ gcs_source = proto.Field(
+ proto.MESSAGE, number=2, oneof="source", message="GcsSource",
+ )
+
+ mime_type = proto.Field(proto.STRING, number=4)
+
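Because content and gcs_source share a oneof, a request sets exactly one of them; a brief hedged sketch (types assumed to be re-exported from google.cloud.translate_v3beta1, URIs are placeholders):

    from google.cloud import translate_v3beta1 as translate

    # Inline bytes: mime_type is required because there is no filename to infer it from.
    inline_config = translate.DocumentInputConfig(
        content=b"%PDF-1.4 ...",  # placeholder document bytes
        mime_type="application/pdf",
    )

    # Cloud Storage file: mime_type may be omitted and inferred from the .docx extension.
    gcs_config = translate.DocumentInputConfig(
        gcs_source=translate.GcsSource(input_uri="gs://example_bucket/example_file.docx"),
    )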
+
+class DocumentOutputConfig(proto.Message):
+ r"""A document translation request output config.
+
+ Attributes:
+ gcs_destination (google.cloud.translate_v3beta1.types.GcsDestination):
+ Optional. Google Cloud Storage destination for the
+ translation output, e.g., ``gs://my_bucket/my_directory/``.
+
+ The destination directory provided does not have to be
+ empty, but the bucket must exist. If a file with the same
+ name as the output file already exists in the destination an
+ error will be returned.
+
+ For a DocumentInputConfig.contents provided document, the
+ output file will have the name
+ "output_[trg]_translations.[ext]", where
+
+ - [trg] corresponds to the translated file's language code,
+ - [ext] corresponds to the translated file's extension
+ according to its mime type.
+
+ For a DocumentInputConfig.gcs_uri provided document, the
+ output file will have a name according to its URI. For
+ example: an input file with URI: "gs://a/b/c.[extension]"
+ stored in a gcs_destination bucket with name "my_bucket"
+ will have an output URI:
+ "gs://my_bucket/a_b_c\_[trg]_translations.[ext]", where
+
+ - [trg] corresponds to the translated file's language code,
+ - [ext] corresponds to the translated file's extension
+ according to its mime type.
+
+ If the document was directly provided through the request,
+ then the output document will have the format:
+ "gs://my_bucket/translated_document_[trg]_translations.[ext],
+ where
+
+ - [trg] corresponds to the translated file's language code,
+ - [ext] corresponds to the translated file's extension
+ according to its mime type.
+
+ If a glossary was provided, then the output URI for the
+ glossary translation will be equal to the default output URI
+ but have ``glossary_translations`` instead of
+ ``translations``. For the previous example, its glossary URI
+ would be:
+ "gs://my_bucket/a_b_c\_[trg]_glossary_translations.[ext]".
+
+ Thus the max number of output files will be 2 (Translated
+ document, Glossary translated document).
+
+ Callers should expect no partial outputs. If there is any
+ error during document translation, no output will be stored
+ in the Cloud Storage bucket.
+ mime_type (str):
+ Optional. Specifies the translated document's mime_type. If
+ not specified, the translated file's mime type will be the
+ same as the input file's mime type. Currently, only an
+ output mime type identical to the input mime type is supported.
+
+ - application/pdf
+ - application/vnd.openxmlformats-officedocument.wordprocessingml.document
+ - application/vnd.openxmlformats-officedocument.presentationml.presentation
+ - application/vnd.openxmlformats-officedocument.spreadsheetml.sheet
+ """
+
+ gcs_destination = proto.Field(
+ proto.MESSAGE, number=1, oneof="destination", message="GcsDestination",
+ )
+
+ mime_type = proto.Field(proto.STRING, number=3)
+
+
+class TranslateDocumentRequest(proto.Message):
+ r"""A document translation request.
+
+ Attributes:
+ parent (str):
+ Required. Location to make a regional call.
+
+ Format:
+ ``projects/{project-number-or-id}/locations/{location-id}``.
+
+ For global calls, use
+ ``projects/{project-number-or-id}/locations/global`` or
+ ``projects/{project-number-or-id}``.
+
+ Non-global location is required for requests using AutoML
+ models or custom glossaries.
+
+ Models and glossaries must be within the same region (have
+ the same location-id), otherwise an INVALID_ARGUMENT (400)
+ error is returned.
+ source_language_code (str):
+ Optional. The BCP-47 language code of the
+ input document if known, for example, "en-US" or
+ "sr-Latn". Supported language codes are listed
+ in Language Support. If the source language
+ isn't specified, the API attempts to identify
+ the source language automatically and returns
+ the source language within the response. Source
+ language must be specified if the request
+ contains a glossary or a custom model.
+ target_language_code (str):
+ Required. The BCP-47 language code to use for
+ translation of the input document, set to one of
+ the language codes listed in Language Support.
+ document_input_config (google.cloud.translate_v3beta1.types.DocumentInputConfig):
+ Required. Input configurations.
+ document_output_config (google.cloud.translate_v3beta1.types.DocumentOutputConfig):
+ Optional. Output configurations.
+ Defines if the output file should be stored
+ within Cloud Storage as well as the desired
+ output format. If not provided the translated
+ file will only be returned through a byte-stream
+ and its output mime type will be the same as the
+ input file's mime type.
+ model (str):
+ Optional. The ``model`` type requested for this translation.
+
+ The format depends on model type:
+
+ - AutoML Translation models:
+ ``projects/{project-number-or-id}/locations/{location-id}/models/{model-id}``
+
+ - General (built-in) models:
+ ``projects/{project-number-or-id}/locations/{location-id}/models/general/nmt``,
+ ``projects/{project-number-or-id}/locations/{location-id}/models/general/base``
+
+ If not provided, the default Google model (NMT) will be used
+ for translation.
+ glossary_config (google.cloud.translate_v3beta1.types.TranslateTextGlossaryConfig):
+ Optional. Glossary to be applied. The glossary must be
+ within the same region (have the same location-id) as the
+ model, otherwise an INVALID_ARGUMENT (400) error is
+ returned.
+ labels (Sequence[google.cloud.translate_v3beta1.types.TranslateDocumentRequest.LabelsEntry]):
+ Optional. The labels with user-defined
+ metadata for the request.
+ Label keys and values can be no longer than 63
+ characters (Unicode codepoints), can only
+ contain lowercase letters, numeric characters,
+ underscores and dashes. International characters
+ are allowed. Label values are optional. Label
+ keys must start with a letter.
+ See
+ https://cloud.google.com/translate/docs/advanced/labels
+ for more information.
+ """
+
+ parent = proto.Field(proto.STRING, number=1)
+
+ source_language_code = proto.Field(proto.STRING, number=2)
+
+ target_language_code = proto.Field(proto.STRING, number=3)
+
+ document_input_config = proto.Field(
+ proto.MESSAGE, number=4, message="DocumentInputConfig",
+ )
+
+ document_output_config = proto.Field(
+ proto.MESSAGE, number=5, message="DocumentOutputConfig",
+ )
+
+ model = proto.Field(proto.STRING, number=6)
+
+ glossary_config = proto.Field(
+ proto.MESSAGE, number=7, message="TranslateTextGlossaryConfig",
+ )
+
+ labels = proto.MapField(proto.STRING, proto.STRING, number=8)
+
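Putting the request together with typed messages, a hedged end-to-end sketch (project, bucket, and language choices are placeholders):

    from google.cloud import translate_v3beta1 as translate

    client = translate.TranslationServiceClient()

    request = translate.TranslateDocumentRequest(
        parent="projects/YOUR_PROJECT_ID/locations/us-central1",
        source_language_code="en-US",
        target_language_code="de",
        document_input_config=translate.DocumentInputConfig(
            gcs_source=translate.GcsSource(input_uri="gs://YOUR_BUCKET/input/report.docx"),
        ),
        # Optional: also persist the translated file to Cloud Storage.
        document_output_config=translate.DocumentOutputConfig(
            gcs_destination=translate.GcsDestination(
                output_uri_prefix="gs://YOUR_BUCKET/output/"
            ),
        ),
    )

    response = client.translate_document(request=request)
    print(response.document_translation.detected_language_code)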
+
+class DocumentTranslation(proto.Message):
+ r"""A translated document message.
+
+ Attributes:
+ byte_stream_outputs (Sequence[bytes]):
+ The array of translated documents. It is
+ expected to be size 1 for now. We may produce
+ multiple translated documents in the future for
+ other types of file formats.
+ mime_type (str):
+ The translated document's mime type.
+ detected_language_code (str):
+ The detected language for the input document.
+ If the user did not provide the source language
+ for the input document, this field will have the
+ language code automatically detected. If the
+ source language was passed, auto-detection of
+ the language does not occur and this field is
+ empty.
+ """
+
+ byte_stream_outputs = proto.RepeatedField(proto.BYTES, number=1)
+
+ mime_type = proto.Field(proto.STRING, number=2)
+
+ detected_language_code = proto.Field(proto.STRING, number=3)
+
+
+class TranslateDocumentResponse(proto.Message):
+ r"""A translated document response message.
+
+ Attributes:
+ document_translation (google.cloud.translate_v3beta1.types.DocumentTranslation):
+ Translated document.
+ glossary_document_translation (google.cloud.translate_v3beta1.types.DocumentTranslation):
+ The document's translation output if a glossary is provided
+ in the request. This can be the same as
+ [TranslateDocumentResponse.document_translation] if no
+ glossary terms apply.
+ model (str):
+ Only present when 'model' is present in the request. 'model'
+ is normalized to have a project number.
+
+ For example: If the 'model' field in
+ TranslateDocumentRequest is:
+ ``projects/{project-id}/locations/{location-id}/models/general/nmt``
+ then ``model`` here would be normalized to
+ ``projects/{project-number}/locations/{location-id}/models/general/nmt``.
+ glossary_config (google.cloud.translate_v3beta1.types.TranslateTextGlossaryConfig):
+ The ``glossary_config`` used for this translation.
+ """
+
+ document_translation = proto.Field(
+ proto.MESSAGE, number=1, message="DocumentTranslation",
+ )
+
+ glossary_document_translation = proto.Field(
+ proto.MESSAGE, number=2, message="DocumentTranslation",
+ )
+
+ model = proto.Field(proto.STRING, number=3)
+
+ glossary_config = proto.Field(
+ proto.MESSAGE, number=4, message="TranslateTextGlossaryConfig",
)
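When no DocumentOutputConfig is supplied, the translated bytes arrive inline in the response; a short hedged sketch of consuming them:

    def save_translation(response, path="translated.pdf"):
        """Write the first (currently only) translated document to disk."""
        document = response.document_translation
        with open(path, "wb") as out:
            out.write(document.byte_stream_outputs[0])
        return document.mime_type, document.detected_language_code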
@@ -563,7 +861,8 @@ class BatchTranslateTextRequest(proto.Message):
Required. Location to make a call. Must refer to a caller's
project.
- Format: ``projects/{project-id}/locations/{location-id}``.
+ Format:
+ ``projects/{project-number-or-id}/locations/{location-id}``.
The ``global`` location is not supported for batch
translation.
@@ -576,37 +875,38 @@ class BatchTranslateTextRequest(proto.Message):
target_language_codes (Sequence[str]):
Required. Specify up to 10 language codes
here.
- models (Sequence[~.translation_service.BatchTranslateTextRequest.ModelsEntry]):
+ models (Sequence[google.cloud.translate_v3beta1.types.BatchTranslateTextRequest.ModelsEntry]):
Optional. The models to use for translation. Map's key is
- target language code. Map's value is model name. Value can
- be a built-in general model, or an AutoML Translation model.
+ target language code. Map's value is the model name. Value
+ can be a built-in general model, or an AutoML Translation
+ model.
The value format depends on model type:
- AutoML Translation models:
- ``projects/{project-id}/locations/{location-id}/models/{model-id}``
+ ``projects/{project-number-or-id}/locations/{location-id}/models/{model-id}``
- General (built-in) models:
- ``projects/{project-id}/locations/{location-id}/models/general/nmt``,
- ``projects/{project-id}/locations/{location-id}/models/general/base``
+ ``projects/{project-number-or-id}/locations/{location-id}/models/general/nmt``,
+ ``projects/{project-number-or-id}/locations/{location-id}/models/general/base``
If the map is empty or a specific model is not requested for
a language pair, then default google model (nmt) is used.
- input_configs (Sequence[~.translation_service.InputConfig]):
+ input_configs (Sequence[google.cloud.translate_v3beta1.types.InputConfig]):
Required. Input configurations.
The total number of files matched should be <=
- 1000. The total content size should be <= 100M
+ 100. The total content size should be <= 100M
Unicode codepoints. The files must use UTF-8
encoding.
- output_config (~.translation_service.OutputConfig):
+ output_config (google.cloud.translate_v3beta1.types.OutputConfig):
Required. Output configuration.
If 2 input configs match to the same file (that
is, same input path), we don't generate output
for duplicate inputs.
- glossaries (Sequence[~.translation_service.BatchTranslateTextRequest.GlossariesEntry]):
+ glossaries (Sequence[google.cloud.translate_v3beta1.types.BatchTranslateTextRequest.GlossariesEntry]):
Optional. Glossaries to be applied for
translation. It's keyed by target language code.
- labels (Sequence[~.translation_service.BatchTranslateTextRequest.LabelsEntry]):
+ labels (Sequence[google.cloud.translate_v3beta1.types.BatchTranslateTextRequest.LabelsEntry]):
Optional. The labels with user-defined
metadata for the request.
Label keys and values can be no longer than 63
@@ -628,12 +928,12 @@ class BatchTranslateTextRequest(proto.Message):
models = proto.MapField(proto.STRING, proto.STRING, number=4)
- input_configs = proto.RepeatedField(proto.MESSAGE, number=5, message=InputConfig,)
+ input_configs = proto.RepeatedField(proto.MESSAGE, number=5, message="InputConfig",)
- output_config = proto.Field(proto.MESSAGE, number=6, message=OutputConfig,)
+ output_config = proto.Field(proto.MESSAGE, number=6, message="OutputConfig",)
glossaries = proto.MapField(
- proto.STRING, proto.MESSAGE, number=7, message=TranslateTextGlossaryConfig,
+ proto.STRING, proto.MESSAGE, number=7, message="TranslateTextGlossaryConfig",
)
labels = proto.MapField(proto.STRING, proto.STRING, number=9)
@@ -643,7 +943,7 @@ class BatchTranslateMetadata(proto.Message):
r"""State metadata for the batch translation operation.
Attributes:
- state (~.translation_service.BatchTranslateMetadata.State):
+ state (google.cloud.translate_v3beta1.types.BatchTranslateMetadata.State):
The state of the operation.
translated_characters (int):
Number of successfully translated characters
@@ -657,7 +957,7 @@ class BatchTranslateMetadata(proto.Message):
codepoints from input files times the number of
target languages and appears here shortly after
the call is submitted.
- submit_time (~.timestamp.Timestamp):
+ submit_time (google.protobuf.timestamp_pb2.Timestamp):
Time when the operation was submitted.
"""
@@ -697,9 +997,9 @@ class BatchTranslateResponse(proto.Message):
failed_characters (int):
Number of characters that have failed to
process (Unicode codepoints).
- submit_time (~.timestamp.Timestamp):
+ submit_time (google.protobuf.timestamp_pb2.Timestamp):
Time when the operation was submitted.
- end_time (~.timestamp.Timestamp):
+ end_time (google.protobuf.timestamp_pb2.Timestamp):
The time when the operation is finished and
[google.longrunning.Operation.done][google.longrunning.Operation.done]
is set to true.
@@ -720,7 +1020,7 @@ class GlossaryInputConfig(proto.Message):
r"""Input configuration for glossaries.
Attributes:
- gcs_source (~.translation_service.GcsSource):
+ gcs_source (google.cloud.translate_v3beta1.types.GcsSource):
Required. Google Cloud Storage location of glossary data.
File format is determined based on the filename extension.
API returns [google.rpc.Code.INVALID_ARGUMENT] for
@@ -749,7 +1049,7 @@ class GlossaryInputConfig(proto.Message):
"""
gcs_source = proto.Field(
- proto.MESSAGE, number=1, oneof="source", message=GcsSource,
+ proto.MESSAGE, number=1, oneof="source", message="GcsSource",
)
@@ -760,21 +1060,21 @@ class Glossary(proto.Message):
name (str):
Required. The resource name of the glossary. Glossary names
have the form
- ``projects/{project-id}/locations/{location-id}/glossaries/{glossary-id}``.
- language_pair (~.translation_service.Glossary.LanguageCodePair):
+ ``projects/{project-number-or-id}/locations/{location-id}/glossaries/{glossary-id}``.
+ language_pair (google.cloud.translate_v3beta1.types.Glossary.LanguageCodePair):
Used with unidirectional glossaries.
- language_codes_set (~.translation_service.Glossary.LanguageCodesSet):
+ language_codes_set (google.cloud.translate_v3beta1.types.Glossary.LanguageCodesSet):
Used with equivalent term set glossaries.
- input_config (~.translation_service.GlossaryInputConfig):
+ input_config (google.cloud.translate_v3beta1.types.GlossaryInputConfig):
Required. Provides examples to build the
glossary from. Total glossary must not exceed
10M Unicode codepoints.
entry_count (int):
Output only. The number of entries defined in
the glossary.
- submit_time (~.timestamp.Timestamp):
+ submit_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. When CreateGlossary was called.
- end_time (~.timestamp.Timestamp):
+ end_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. When the glossary creation was
finished.
"""
@@ -820,7 +1120,7 @@ class LanguageCodesSet(proto.Message):
proto.MESSAGE, number=4, oneof="languages", message=LanguageCodesSet,
)
- input_config = proto.Field(proto.MESSAGE, number=5, message=GlossaryInputConfig,)
+ input_config = proto.Field(proto.MESSAGE, number=5, message="GlossaryInputConfig",)
entry_count = proto.Field(proto.INT32, number=6)
@@ -835,13 +1135,13 @@ class CreateGlossaryRequest(proto.Message):
Attributes:
parent (str):
Required. The project name.
- glossary (~.translation_service.Glossary):
+ glossary (google.cloud.translate_v3beta1.types.Glossary):
Required. The glossary to create.
"""
parent = proto.Field(proto.STRING, number=1)
- glossary = proto.Field(proto.MESSAGE, number=2, message=Glossary,)
+ glossary = proto.Field(proto.MESSAGE, number=2, message="Glossary",)
class GetGlossaryRequest(proto.Message):
@@ -887,9 +1187,29 @@ class ListGlossariesRequest(proto.Message):
is returned if ``page_token``\ is empty or missing.
filter (str):
Optional. Filter specifying constraints of a
- list operation. Filtering is not supported yet,
- and the parameter currently has no effect. If
- missing, no filtering is performed.
+ list operation. Specify the constraint by the
+ format of "key=value", where key must be "src"
+ or "tgt", and the value must be a valid language
+ code. For multiple restrictions, concatenate
+ them by "AND" (uppercase only), such as:
+ "src=en-US AND tgt=zh-CN". Notice that the exact
+ match is used here, which means using 'en-US'
+ and 'en' can lead to different results, which
+ depends on the language code you used when you
+ create the glossary. For the unidirectional
+ glossaries, the "src" and "tgt" add restrictions
+ on the source and target language code
+ separately. For the equivalent term set
+ glossaries, the "src" and/or "tgt" add
+ restrictions on the term set.
+ For example: "src=en-US AND tgt=zh-CN" will only
+ pick the unidirectional glossaries which exactly
+ match the source language code as "en-US" and
+ the target language code "zh-CN", but all
+ equivalent term set glossaries which contain
+ "en-US" and "zh-CN" in their language set will
+ be picked. If missing, no filtering is
+ performed.
"""
parent = proto.Field(proto.STRING, number=1)
@@ -905,7 +1225,7 @@ class ListGlossariesResponse(proto.Message):
r"""Response message for ListGlossaries.
Attributes:
- glossaries (Sequence[~.translation_service.Glossary]):
+ glossaries (Sequence[google.cloud.translate_v3beta1.types.Glossary]):
The list of glossaries for a project.
next_page_token (str):
A token to retrieve a page of results. Pass this value in
@@ -918,7 +1238,7 @@ class ListGlossariesResponse(proto.Message):
def raw_page(self):
return self
- glossaries = proto.RepeatedField(proto.MESSAGE, number=1, message=Glossary,)
+ glossaries = proto.RepeatedField(proto.MESSAGE, number=1, message="Glossary",)
next_page_token = proto.Field(proto.STRING, number=2)
@@ -932,10 +1252,10 @@ class CreateGlossaryMetadata(proto.Message):
name (str):
The name of the glossary that is being
created.
- state (~.translation_service.CreateGlossaryMetadata.State):
+ state (google.cloud.translate_v3beta1.types.CreateGlossaryMetadata.State):
The current state of the glossary creation
operation.
- submit_time (~.timestamp.Timestamp):
+ submit_time (google.protobuf.timestamp_pb2.Timestamp):
The time when the operation was submitted to
the server.
"""
@@ -967,10 +1287,10 @@ class DeleteGlossaryMetadata(proto.Message):
name (str):
The name of the glossary that is being
deleted.
- state (~.translation_service.DeleteGlossaryMetadata.State):
+ state (google.cloud.translate_v3beta1.types.DeleteGlossaryMetadata.State):
The current state of the glossary deletion
operation.
- submit_time (~.timestamp.Timestamp):
+ submit_time (google.protobuf.timestamp_pb2.Timestamp):
The time when the operation was submitted to
the server.
"""
@@ -1001,10 +1321,10 @@ class DeleteGlossaryResponse(proto.Message):
Attributes:
name (str):
The name of the deleted glossary.
- submit_time (~.timestamp.Timestamp):
+ submit_time (google.protobuf.timestamp_pb2.Timestamp):
The time when the operation was submitted to
the server.
- end_time (~.timestamp.Timestamp):
+ end_time (google.protobuf.timestamp_pb2.Timestamp):
The time when the glossary deletion is finished and
[google.longrunning.Operation.done][google.longrunning.Operation.done]
is set to true.
@@ -1017,4 +1337,314 @@ class DeleteGlossaryResponse(proto.Message):
end_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,)
+class BatchTranslateDocumentRequest(proto.Message):
+ r"""The BatchTranslateDocument request.
+
+ Attributes:
+ parent (str):
+ Required. Location to make a regional call.
+
+ Format:
+ ``projects/{project-number-or-id}/locations/{location-id}``.
+
+ The ``global`` location is not supported for batch
+ translation.
+
+ Only AutoML Translation models or glossaries within the same
+ region (have the same location-id) can be used, otherwise an
+ INVALID_ARGUMENT (400) error is returned.
+ source_language_code (str):
+ Required. The BCP-47 language code of the
+ input document if known, for example, "en-US" or
+ "sr-Latn". Supported language codes are listed
+ in Language Support
+ (https://cloud.google.com/translate/docs/languages).
+ target_language_codes (Sequence[str]):
+ Required. The BCP-47 language code to use for
+ translation of the input document. Specify up to
+ 10 language codes here.
+ input_configs (Sequence[google.cloud.translate_v3beta1.types.BatchDocumentInputConfig]):
+ Required. Input configurations.
+ The total number of files matched should be <=
+ 100. The total content size to translate should
+ be <= 100M Unicode codepoints. The files must
+ use UTF-8 encoding.
+ output_config (google.cloud.translate_v3beta1.types.BatchDocumentOutputConfig):
+ Required. Output configuration.
+ If 2 input configs match to the same file (that
+ is, same input path), we don't generate output
+ for duplicate inputs.
+ models (Sequence[google.cloud.translate_v3beta1.types.BatchTranslateDocumentRequest.ModelsEntry]):
+ Optional. The models to use for translation. Map's key is
+ target language code. Map's value is the model name. Value
+ can be a built-in general model, or an AutoML Translation
+ model.
+
+ The value format depends on model type:
+
+ - AutoML Translation models:
+ ``projects/{project-number-or-id}/locations/{location-id}/models/{model-id}``
+
+ - General (built-in) models:
+ ``projects/{project-number-or-id}/locations/{location-id}/models/general/nmt``,
+ ``projects/{project-number-or-id}/locations/{location-id}/models/general/base``
+
+ If the map is empty or a specific model is not requested for
+ a language pair, then default google model (nmt) is used.
+ glossaries (Sequence[google.cloud.translate_v3beta1.types.BatchTranslateDocumentRequest.GlossariesEntry]):
+ Optional. Glossaries to be applied. It's
+ keyed by target language code.
+ """
+
+ parent = proto.Field(proto.STRING, number=1)
+
+ source_language_code = proto.Field(proto.STRING, number=2)
+
+ target_language_codes = proto.RepeatedField(proto.STRING, number=3)
+
+ input_configs = proto.RepeatedField(
+ proto.MESSAGE, number=4, message="BatchDocumentInputConfig",
+ )
+
+ output_config = proto.Field(
+ proto.MESSAGE, number=5, message="BatchDocumentOutputConfig",
+ )
+
+ models = proto.MapField(proto.STRING, proto.STRING, number=6)
+
+ glossaries = proto.MapField(
+ proto.STRING, proto.MESSAGE, number=7, message="TranslateTextGlossaryConfig",
+ )
+
+
+class BatchDocumentInputConfig(proto.Message):
+ r"""Input configuration for BatchTranslateDocument request.
+
+ Attributes:
+ gcs_source (google.cloud.translate_v3beta1.types.GcsSource):
+ Google Cloud Storage location for the source input. This can
+ be a single file (for example,
+ ``gs://translation-test/input.docx``) or a wildcard (for
+ example, ``gs://translation-test/*``).
+
+ File mime type is determined based on extension. Supported
+ mime type includes:
+
+ - ``pdf``, application/pdf
+ - ``docx``,
+ application/vnd.openxmlformats-officedocument.wordprocessingml.document
+ - ``pptx``,
+ application/vnd.openxmlformats-officedocument.presentationml.presentation
+ - ``xlsx``,
+ application/vnd.openxmlformats-officedocument.spreadsheetml.sheet
+
+ The max file size supported for ``.docx``, ``.pptx`` and
+ ``.xlsx`` is 100MB. The max file size supported for ``.pdf``
+ is 1GB and the max page limit is 1000 pages. The max file
+ size supported for all input documents is 1GB.
+ """
+
+ gcs_source = proto.Field(
+ proto.MESSAGE, number=1, oneof="source", message="GcsSource",
+ )
+
+
+class BatchDocumentOutputConfig(proto.Message):
+ r"""Output configuration for BatchTranslateDocument request.
+
+ Attributes:
+ gcs_destination (google.cloud.translate_v3beta1.types.GcsDestination):
+ Google Cloud Storage destination for output content. For
+ every single input document (for example,
+ gs://a/b/c.[extension]), we generate at most 2 \* n output
+ files. (n is the # of target_language_codes in the
+ BatchTranslateDocumentRequest).
+
+ While the input documents are being processed, we
+ write/update an index file ``index.csv`` under
+ ``gcs_destination.output_uri_prefix`` (for example,
+ gs://translation_output/index.csv). The index file is
+ generated/updated as new files are being translated. The
+ format is:
+
+ input_document,target_language_code,translation_output,error_output,
+ glossary_translation_output,glossary_error_output
+
+ ``input_document`` is one file we matched using
+ gcs_source.input_uri. ``target_language_code`` is provided
+ in the request. ``translation_output`` contains the
+ translations. (details provided below) ``error_output``
+ contains the error message during processing of the file.
+ Both translations_file and errors_file could be empty
+ strings if we have no content to output.
+ ``glossary_translation_output`` and
+ ``glossary_error_output`` are the translated output/error
+ when we apply glossaries. They could also be empty if we
+ have no content to output.
+
+ Once a row is present in index.csv, the input/output
+ matching never changes. Callers should also expect all the
+ content in input_file are processed and ready to be consumed
+ (that is, no partial output file is written).
+
+ Since index.csv will be updated during the process, please
+ make sure there is no custom retention policy applied on
+ the output bucket that may prevent file updating.
+ (https://cloud.google.com/storage/docs/bucket-lock?hl=en#retention-policy)
+
+ The naming format of translation output files follows (for
+ target language code [trg]): ``translation_output``:
+ gs://translation_output/a_b_c_[trg]_translation.[extension]
+ ``glossary_translation_output``:
+ gs://translation_test/a_b_c_[trg]_glossary_translation.[extension]
+ The output document will maintain the same file format as
+ the input document.
+
+ The naming format of error output files follows (for target
+ language code [trg]): ``error_output``:
+ gs://translation_test/a_b_c_[trg]_errors.txt
+ ``glossary_error_output``:
+ gs://translation_test/a_b_c_[trg]_glossary_translation.txt
+ The error output is a txt file containing error details.
+ """
+
+ gcs_destination = proto.Field(
+ proto.MESSAGE, number=1, oneof="destination", message="GcsDestination",
+ )
+
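With both configs defined, a hedged sketch of assembling and submitting the batch request (typed construction; project and bucket names are placeholders):

    from google.cloud import translate_v3beta1 as translate

    request = translate.BatchTranslateDocumentRequest(
        parent="projects/YOUR_PROJECT_ID/locations/us-central1",
        source_language_code="en-US",
        target_language_codes=["fr", "ja"],
        input_configs=[
            translate.BatchDocumentInputConfig(
                gcs_source=translate.GcsSource(input_uri="gs://YOUR_BUCKET/docs/*"),
            )
        ],
        output_config=translate.BatchDocumentOutputConfig(
            gcs_destination=translate.GcsDestination(
                output_uri_prefix="gs://YOUR_BUCKET/translated/"
            ),
        ),
    )

    client = translate.TranslationServiceClient()
    operation = client.batch_translate_document(request=request)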
+
+class BatchTranslateDocumentResponse(proto.Message):
+ r"""Stored in the
+ [google.longrunning.Operation.response][google.longrunning.Operation.response]
+ field returned by BatchTranslateDocument if at least one document is
+ translated successfully.
+
+ Attributes:
+ total_pages (int):
+ Total number of pages to translate in all
+ documents. Documents without a clear page
+ definition (such as XLSX) are not counted.
+ translated_pages (int):
+ Number of successfully translated pages in
+ all documents. Documents without a clear page
+ definition (such as XLSX) are not counted.
+ failed_pages (int):
+ Number of pages that failed to process in all
+ documents. Documents without a clear page
+ definition (such as XLSX) are not counted.
+ total_billable_pages (int):
+ Number of billable pages in documents with
+ clear page definition (such as PDF, DOCX, PPTX)
+ total_characters (int):
+ Total number of characters (Unicode
+ codepoints) in all documents.
+ translated_characters (int):
+ Number of successfully translated characters
+ (Unicode codepoints) in all documents.
+ failed_characters (int):
+ Number of characters that have failed to
+ process (Unicode codepoints) in all documents.
+ total_billable_characters (int):
+ Number of billable characters (Unicode
+ codepoints) in documents without clear page
+ definition, such as XLSX.
+ submit_time (google.protobuf.timestamp_pb2.Timestamp):
+ Time when the operation was submitted.
+ end_time (google.protobuf.timestamp_pb2.Timestamp):
+ The time when the operation is finished and
+ [google.longrunning.Operation.done][google.longrunning.Operation.done]
+ is set to true.
+ """
+
+ total_pages = proto.Field(proto.INT64, number=1)
+
+ translated_pages = proto.Field(proto.INT64, number=2)
+
+ failed_pages = proto.Field(proto.INT64, number=3)
+
+ total_billable_pages = proto.Field(proto.INT64, number=4)
+
+ total_characters = proto.Field(proto.INT64, number=5)
+
+ translated_characters = proto.Field(proto.INT64, number=6)
+
+ failed_characters = proto.Field(proto.INT64, number=7)
+
+ total_billable_characters = proto.Field(proto.INT64, number=8)
+
+ submit_time = proto.Field(proto.MESSAGE, number=9, message=timestamp.Timestamp,)
+
+ end_time = proto.Field(proto.MESSAGE, number=10, message=timestamp.Timestamp,)
+
+
+class BatchTranslateDocumentMetadata(proto.Message):
+ r"""State metadata for the batch translation operation.
+
+ Attributes:
+ state (google.cloud.translate_v3beta1.types.BatchTranslateDocumentMetadata.State):
+ The state of the operation.
+ total_pages (int):
+ Total number of pages to translate in all
+ documents so far. Documents without clear page
+ definition (such as XLSX) are not counted.
+ translated_pages (int):
+ Number of successfully translated pages in
+ all documents so far. Documents without clear
+ page definition (such as XLSX) are not counted.
+ failed_pages (int):
+ Number of pages that failed to process in all
+ documents so far. Documents without clear page
+ definition (such as XLSX) are not counted.
+ total_billable_pages (int):
+ Number of billable pages in documents with
+ clear page definition (such as PDF, DOCX, PPTX)
+ so far.
+ total_characters (int):
+ Total number of characters (Unicode
+ codepoints) in all documents so far.
+ translated_characters (int):
+ Number of successfully translated characters
+ (Unicode codepoints) in all documents so far.
+ failed_characters (int):
+ Number of characters that have failed to
+ process (Unicode codepoints) in all documents so
+ far.
+ total_billable_characters (int):
+ Number of billable characters (Unicode
+ codepoints) in documents without clear page
+ definition (such as XLSX) so far.
+ submit_time (google.protobuf.timestamp_pb2.Timestamp):
+ Time when the operation was submitted.
+ """
+
+ class State(proto.Enum):
+ r"""State of the job."""
+ STATE_UNSPECIFIED = 0
+ RUNNING = 1
+ SUCCEEDED = 2
+ FAILED = 3
+ CANCELLING = 4
+ CANCELLED = 5
+
+ state = proto.Field(proto.ENUM, number=1, enum=State,)
+
+ total_pages = proto.Field(proto.INT64, number=2)
+
+ translated_pages = proto.Field(proto.INT64, number=3)
+
+ failed_pages = proto.Field(proto.INT64, number=4)
+
+ total_billable_pages = proto.Field(proto.INT64, number=5)
+
+ total_characters = proto.Field(proto.INT64, number=6)
+
+ translated_characters = proto.Field(proto.INT64, number=7)
+
+ failed_characters = proto.Field(proto.INT64, number=8)
+
+ total_billable_characters = proto.Field(proto.INT64, number=9)
+
+ submit_time = proto.Field(proto.MESSAGE, number=10, message=timestamp.Timestamp,)
+
+
__all__ = tuple(sorted(__protobuf__.manifest))
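
For context on how the counters documented in BatchTranslateDocumentResponse and BatchTranslateDocumentMetadata reach callers, here is a minimal sketch of starting a batch document translation and reading the result once the long-running operation finishes. The project, location, and GCS URIs are placeholders; the request shape follows the batch_translate_document parameter list added to the fixup script further below.

from google.cloud import translate_v3beta1 as translate

client = translate.TranslationServiceClient()
operation = client.batch_translate_document(
    request={
        "parent": "projects/my-project/locations/us-central1",  # placeholder
        "source_language_code": "en",
        "target_language_codes": ["fr"],
        "input_configs": [
            # placeholder input bucket
            {"gcs_source": {"input_uri": "gs://my-bucket/input/*"}}
        ],
        "output_config": {
            # placeholder output bucket (no conflicting retention policy)
            "gcs_destination": {"output_uri_prefix": "gs://my-bucket/output/"}
        },
    }
)

response = operation.result(timeout=600)  # BatchTranslateDocumentResponse
print("translated pages:", response.translated_pages)
print("failed pages:", response.failed_pages)

metadata = operation.metadata  # BatchTranslateDocumentMetadata
print("state:", metadata.state)
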
diff --git a/noxfile.py b/noxfile.py
index 845cd5f9..0eb433e2 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -18,6 +18,7 @@
from __future__ import absolute_import
import os
+import pathlib
import shutil
import nox
@@ -28,7 +29,23 @@
DEFAULT_PYTHON_VERSION = "3.8"
SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"]
-UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"]
+UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
+
+CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
+
+# 'docfx' is excluded since it only needs to run in 'docs-presubmit'
+nox.options.sessions = [
+ "unit",
+ "system",
+ "cover",
+ "lint",
+ "lint_setup_py",
+ "blacken",
+ "docs",
+]
+
+# Error if a python version is missing
+nox.options.error_on_missing_interpreters = True
@nox.session(python=DEFAULT_PYTHON_VERSION)
@@ -70,18 +87,23 @@ def lint_setup_py(session):
def default(session):
# Install all test dependencies, then install this package in-place.
- session.install("asyncmock", "pytest-asyncio")
- session.install("mock", "pytest", "pytest-cov")
- session.install("-e", ".")
+ constraints_path = str(
+ CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
+ )
+ session.install("asyncmock", "pytest-asyncio", "-c", constraints_path)
+
+ session.install("mock", "pytest", "pytest-cov", "-c", constraints_path)
+
+ session.install("-e", ".", "-c", constraints_path)
# Run py.test against the unit tests.
session.run(
"py.test",
"--quiet",
- "--cov=google.cloud.translate",
- "--cov=google.cloud",
- "--cov=tests.unit",
+ f"--junitxml=unit_{session.python}_sponge_log.xml",
+ "--cov=google/cloud",
+ "--cov=tests/unit",
"--cov-append",
"--cov-config=.coveragerc",
"--cov-report=",
@@ -100,6 +122,9 @@ def unit(session):
@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)
def system(session):
"""Run the system test suite."""
+ constraints_path = str(
+ CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
+ )
system_test_path = os.path.join("tests", "system.py")
system_test_folder_path = os.path.join("tests", "system")
@@ -109,6 +134,9 @@ def system(session):
# Sanity check: Only run tests if the environment variable is set.
if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""):
session.skip("Credentials must be set via environment variable")
+ # Install pyopenssl for mTLS testing.
+ if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true":
+ session.install("pyopenssl")
system_test_exists = os.path.exists(system_test_path)
system_test_folder_exists = os.path.exists(system_test_folder_path)
@@ -121,16 +149,26 @@ def system(session):
# Install all test dependencies, then install this package into the
# virtualenv's dist-packages.
- session.install(
- "mock", "pytest", "google-cloud-testutils",
- )
- session.install("-e", ".")
+ session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path)
+ session.install("-e", ".", "-c", constraints_path)
# Run py.test against the system tests.
if system_test_exists:
- session.run("py.test", "--quiet", system_test_path, *session.posargs)
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_path,
+ *session.posargs,
+ )
if system_test_folder_exists:
- session.run("py.test", "--quiet", system_test_folder_path, *session.posargs)
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_folder_path,
+ *session.posargs,
+ )
@nox.session(python=DEFAULT_PYTHON_VERSION)
@@ -173,9 +211,7 @@ def docfx(session):
"""Build the docfx yaml files for this library."""
session.install("-e", ".")
- # sphinx-docfx-yaml supports up to sphinx version 1.5.5.
- # https://github.com/docascode/sphinx-docfx-yaml/issues/97
- session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml")
+ session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml")
shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
session.run(
diff --git a/renovate.json b/renovate.json
index 4fa94931..f08bc22c 100644
--- a/renovate.json
+++ b/renovate.json
@@ -1,5 +1,6 @@
{
"extends": [
"config:base", ":preserveSemverRanges"
- ]
+ ],
+ "ignorePaths": [".pre-commit-config.yaml"]
}
diff --git a/samples/snippets/beta_snippets_test.py b/samples/snippets/beta_snippets_test.py
index 7e0c2dc6..dcb396ed 100644
--- a/samples/snippets/beta_snippets_test.py
+++ b/samples/snippets/beta_snippets_test.py
@@ -75,7 +75,7 @@ def test_batch_translate_text(capsys, bucket):
beta_snippets.batch_translate_text(
PROJECT_ID,
"gs://cloud-samples-data/translation/text.txt",
- "gs://{}/translation/BATCH_TRANSLATION_OUTPUT/".format(bucket.name),
+ "gs://{}/translation/BATCH_TRANSLATION_BETA_OUTPUT/".format(bucket.name),
)
out, _ = capsys.readouterr()
assert "Total Characters: 13" in out
diff --git a/samples/snippets/hybrid_glossaries/noxfile.py b/samples/snippets/hybrid_glossaries/noxfile.py
index f3a90583..97bf7da8 100644
--- a/samples/snippets/hybrid_glossaries/noxfile.py
+++ b/samples/snippets/hybrid_glossaries/noxfile.py
@@ -17,6 +17,7 @@
import os
from pathlib import Path
import sys
+from typing import Callable, Dict, List, Optional
import nox
@@ -37,22 +38,28 @@
TEST_CONFIG = {
# You can opt out from the test for specific Python versions.
- "ignored_versions": ["2.7"],
+ 'ignored_versions': ["2.7"],
+
+ # Old samples are opted out of enforcing Python type hints
+ # All new samples should feature them
+ 'enforce_type_hints': False,
+
# An envvar key for determining the project id to use. Change it
# to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
# build specific Cloud project. You can also use your own string
# to use your own Cloud project.
- "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
+ 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT',
# 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
+
# A dictionary you want to inject into your test. Don't put any
# secrets here. These values will override predefined values.
- "envs": {},
+ 'envs': {},
}
try:
# Ensure we can import noxfile_config in the project's directory.
- sys.path.append(".")
+ sys.path.append('.')
from noxfile_config import TEST_CONFIG_OVERRIDE
except ImportError as e:
print("No user noxfile_config found: detail: {}".format(e))
@@ -62,26 +69,26 @@
TEST_CONFIG.update(TEST_CONFIG_OVERRIDE)
-def get_pytest_env_vars():
+def get_pytest_env_vars() -> Dict[str, str]:
"""Returns a dict for pytest invocation."""
ret = {}
# Override the GCLOUD_PROJECT and the alias.
- env_key = TEST_CONFIG["gcloud_project_env"]
+ env_key = TEST_CONFIG['gcloud_project_env']
# This should error out if not set.
- ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key]
+ ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key]
# Apply user supplied envs.
- ret.update(TEST_CONFIG["envs"])
+ ret.update(TEST_CONFIG['envs'])
return ret
# DO NOT EDIT - automatically generated.
# All versions used to tested samples.
-ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"]
+ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"]
# Any default versions that should be ignored.
-IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]
+IGNORED_VERSIONS = TEST_CONFIG['ignored_versions']
TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS])
@@ -91,7 +98,7 @@ def get_pytest_env_vars():
#
-def _determine_local_import_names(start_dir):
+def _determine_local_import_names(start_dir: str) -> List[str]:
"""Determines all import names that should be considered "local".
This is used when running the linter to insure that import order is
@@ -129,17 +136,30 @@ def _determine_local_import_names(start_dir):
@nox.session
-def lint(session):
- session.install("flake8", "flake8-import-order")
+def lint(session: nox.sessions.Session) -> None:
+ if not TEST_CONFIG['enforce_type_hints']:
+ session.install("flake8", "flake8-import-order")
+ else:
+ session.install("flake8", "flake8-import-order", "flake8-annotations")
local_names = _determine_local_import_names(".")
args = FLAKE8_COMMON_ARGS + [
"--application-import-names",
",".join(local_names),
- ".",
+ "."
]
session.run("flake8", *args)
+#
+# Black
+#
+
+
+@nox.session
+def blacken(session: nox.sessions.Session) -> None:
+ session.install("black")
+ python_files = [path for path in os.listdir(".") if path.endswith(".py")]
+ session.run("black", *python_files)
#
# Sample Tests
@@ -149,7 +169,7 @@ def lint(session):
PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"]
-def _session_tests(session, post_install=None):
+def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None:
"""Runs py.test for a particular project."""
if os.path.exists("requirements.txt"):
session.install("-r", "requirements.txt")
@@ -175,14 +195,14 @@ def _session_tests(session, post_install=None):
@nox.session(python=ALL_VERSIONS)
-def py(session):
+def py(session: nox.sessions.Session) -> None:
"""Runs py.test for a sample using the specified version of Python."""
if session.python in TESTED_VERSIONS:
_session_tests(session)
else:
- session.skip(
- "SKIPPED: {} tests are disabled for this sample.".format(session.python)
- )
+ session.skip("SKIPPED: {} tests are disabled for this sample.".format(
+ session.python
+ ))
#
@@ -190,7 +210,7 @@ def py(session):
#
-def _get_repo_root():
+def _get_repo_root() -> Optional[str]:
""" Returns the root folder of the project. """
# Get root of this repository. Assume we don't have directories nested deeper than 10 items.
p = Path(os.getcwd())
@@ -213,7 +233,7 @@ def _get_repo_root():
@nox.session
@nox.parametrize("path", GENERATED_READMES)
-def readmegen(session, path):
+def readmegen(session: nox.sessions.Session, path: str) -> None:
"""(Re-)generates the readme for a sample."""
session.install("jinja2", "pyyaml")
dir_ = os.path.dirname(path)
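
The generated noxfile above optionally imports a sibling noxfile_config.py and merges its TEST_CONFIG_OVERRIDE into TEST_CONFIG. A hypothetical override file could look like the following; every value is an example, not part of this change.

# samples/snippets/hybrid_glossaries/noxfile_config.py (hypothetical)
TEST_CONFIG_OVERRIDE = {
    # Skip these interpreter versions for this sample only.
    "ignored_versions": ["2.7", "3.6"],
    # Opt this sample in to flake8-annotations type-hint checks.
    "enforce_type_hints": True,
    # Use the build-specific project env var instead of GOOGLE_CLOUD_PROJECT.
    "gcloud_project_env": "BUILD_SPECIFIC_GCLOUD_PROJECT",
    # Extra environment variables injected into pytest runs.
    "envs": {"EXAMPLE_BUCKET": "my-test-bucket"},
}
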
diff --git a/samples/snippets/hybrid_glossaries/requirements.txt b/samples/snippets/hybrid_glossaries/requirements.txt
index ff0077c1..43508a3f 100644
--- a/samples/snippets/hybrid_glossaries/requirements.txt
+++ b/samples/snippets/hybrid_glossaries/requirements.txt
@@ -1,3 +1,3 @@
-google-cloud-translate==3.0.1
-google-cloud-vision==2.0.0
+google-cloud-translate==3.0.2
+google-cloud-vision==2.2.0
google-cloud-texttospeech==2.2.0
diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py
index 27d948d6..97bf7da8 100644
--- a/samples/snippets/noxfile.py
+++ b/samples/snippets/noxfile.py
@@ -17,6 +17,7 @@
import os
from pathlib import Path
import sys
+from typing import Callable, Dict, List, Optional
import nox
@@ -37,22 +38,28 @@
TEST_CONFIG = {
# You can opt out from the test for specific Python versions.
- "ignored_versions": ["2.7"],
+ 'ignored_versions': ["2.7"],
+
+ # Old samples are opted out of enforcing Python type hints
+ # All new samples should feature them
+ 'enforce_type_hints': False,
+
# An envvar key for determining the project id to use. Change it
# to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
# build specific Cloud project. You can also use your own string
# to use your own Cloud project.
- "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
+ 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT',
# 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
+
# A dictionary you want to inject into your test. Don't put any
# secrets here. These values will override predefined values.
- "envs": {},
+ 'envs': {},
}
try:
# Ensure we can import noxfile_config in the project's directory.
- sys.path.append(".")
+ sys.path.append('.')
from noxfile_config import TEST_CONFIG_OVERRIDE
except ImportError as e:
print("No user noxfile_config found: detail: {}".format(e))
@@ -62,26 +69,26 @@
TEST_CONFIG.update(TEST_CONFIG_OVERRIDE)
-def get_pytest_env_vars():
+def get_pytest_env_vars() -> Dict[str, str]:
"""Returns a dict for pytest invocation."""
ret = {}
# Override the GCLOUD_PROJECT and the alias.
- env_key = TEST_CONFIG["gcloud_project_env"]
+ env_key = TEST_CONFIG['gcloud_project_env']
# This should error out if not set.
- ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key]
+ ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key]
# Apply user supplied envs.
- ret.update(TEST_CONFIG["envs"])
+ ret.update(TEST_CONFIG['envs'])
return ret
# DO NOT EDIT - automatically generated.
# All versions used to tested samples.
-ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"]
+ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"]
# Any default versions that should be ignored.
-IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]
+IGNORED_VERSIONS = TEST_CONFIG['ignored_versions']
TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS])
@@ -91,7 +98,7 @@ def get_pytest_env_vars():
#
-def _determine_local_import_names(start_dir):
+def _determine_local_import_names(start_dir: str) -> List[str]:
"""Determines all import names that should be considered "local".
This is used when running the linter to insure that import order is
@@ -129,17 +136,30 @@ def _determine_local_import_names(start_dir):
@nox.session
-def lint(session):
- session.install("flake8", "flake8-import-order")
+def lint(session: nox.sessions.Session) -> None:
+ if not TEST_CONFIG['enforce_type_hints']:
+ session.install("flake8", "flake8-import-order")
+ else:
+ session.install("flake8", "flake8-import-order", "flake8-annotations")
local_names = _determine_local_import_names(".")
args = FLAKE8_COMMON_ARGS + [
"--application-import-names",
",".join(local_names),
- ".",
+ "."
]
session.run("flake8", *args)
+#
+# Black
+#
+
+
+@nox.session
+def blacken(session: nox.sessions.Session) -> None:
+ session.install("black")
+ python_files = [path for path in os.listdir(".") if path.endswith(".py")]
+ session.run("black", *python_files)
#
# Sample Tests
@@ -149,7 +169,7 @@ def lint(session):
PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"]
-def _session_tests(session, post_install=None):
+def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None:
"""Runs py.test for a particular project."""
if os.path.exists("requirements.txt"):
session.install("-r", "requirements.txt")
@@ -175,7 +195,7 @@ def _session_tests(session, post_install=None):
@nox.session(python=ALL_VERSIONS)
-def py(session):
+def py(session: nox.sessions.Session) -> None:
"""Runs py.test for a sample using the specified version of Python."""
if session.python in TESTED_VERSIONS:
_session_tests(session)
@@ -190,7 +210,7 @@ def py(session):
#
-def _get_repo_root():
+def _get_repo_root() -> Optional[str]:
""" Returns the root folder of the project. """
# Get root of this repository. Assume we don't have directories nested deeper than 10 items.
p = Path(os.getcwd())
@@ -213,7 +233,7 @@ def _get_repo_root():
@nox.session
@nox.parametrize("path", GENERATED_READMES)
-def readmegen(session, path):
+def readmegen(session: nox.sessions.Session, path: str) -> None:
"""(Re-)generates the readme for a sample."""
session.install("jinja2", "pyyaml")
dir_ = os.path.dirname(path)
diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt
index 4f26184f..bf4676fa 100644
--- a/samples/snippets/requirements.txt
+++ b/samples/snippets/requirements.txt
@@ -1,3 +1,3 @@
-google-cloud-translate==3.0.1
-google-cloud-storage==1.33.0
-google-cloud-automl==2.1.0
+google-cloud-translate==3.0.2
+google-cloud-storage==1.37.0
+google-cloud-automl==2.2.0
diff --git a/samples/snippets/snippets.py b/samples/snippets/snippets.py
index 5700969c..8f4381a9 100644
--- a/samples/snippets/snippets.py
+++ b/samples/snippets/snippets.py
@@ -24,8 +24,8 @@
import argparse
+# [START translate_detect_language]
def detect_language(text):
- # [START translate_detect_language]
"""Detects the text's language."""
from google.cloud import translate_v2 as translate
@@ -38,11 +38,12 @@ def detect_language(text):
print("Text: {}".format(text))
print("Confidence: {}".format(result["confidence"]))
print("Language: {}".format(result["language"]))
- # [END translate_detect_language]
+# [END translate_detect_language]
+
+# [START translate_list_codes]
def list_languages():
- # [START translate_list_codes]
"""Lists all available languages."""
from google.cloud import translate_v2 as translate
@@ -52,11 +53,12 @@ def list_languages():
for language in results:
print(u"{name} ({language})".format(**language))
- # [END translate_list_codes]
+# [END translate_list_codes]
+
+# [START translate_list_language_names]
def list_languages_with_target(target):
- # [START translate_list_language_names]
"""Lists all available languages and localizes them to the target language.
Target must be an ISO 639-1 language code.
@@ -70,11 +72,12 @@ def list_languages_with_target(target):
for language in results:
print(u"{name} ({language})".format(**language))
- # [END translate_list_language_names]
+# [END translate_list_language_names]
+
+# [START translate_text_with_model]
def translate_text_with_model(target, text, model="nmt"):
- # [START translate_text_with_model]
"""Translates text into the target language.
Make sure your project is allowlisted.
@@ -97,11 +100,12 @@ def translate_text_with_model(target, text, model="nmt"):
print(u"Text: {}".format(result["input"]))
print(u"Translation: {}".format(result["translatedText"]))
print(u"Detected source language: {}".format(result["detectedSourceLanguage"]))
- # [END translate_text_with_model]
+# [END translate_text_with_model]
+
+# [START translate_translate_text]
def translate_text(target, text):
- # [START translate_translate_text]
"""Translates text into the target language.
Target must be an ISO 639-1 language code.
@@ -122,7 +126,9 @@ def translate_text(target, text):
print(u"Text: {}".format(result["input"]))
print(u"Translation: {}".format(result["translatedText"]))
print(u"Detected source language: {}".format(result["detectedSourceLanguage"]))
- # [END translate_translate_text]
+
+
+# [END translate_translate_text]
if __name__ == "__main__":
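
With the region tags now wrapping the full function definitions, each published snippet is self-contained. A quick usage sketch, assuming credentials are configured via GOOGLE_APPLICATION_CREDENTIALS and the working directory is samples/snippets:

# Usage sketch; both functions print their results via the translate_v2 client.
from snippets import detect_language, list_languages

detect_language("Mi comida favorita es una enchilada.")
list_languages()
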
diff --git a/samples/snippets/translate_v3_batch_translate_text_with_glossary_and_model_test.py b/samples/snippets/translate_v3_batch_translate_text_with_glossary_and_model_test.py
index 6579831a..5abadec4 100644
--- a/samples/snippets/translate_v3_batch_translate_text_with_glossary_and_model_test.py
+++ b/samples/snippets/translate_v3_batch_translate_text_with_glossary_and_model_test.py
@@ -15,7 +15,10 @@
import os
import uuid
+import backoff
+from google.api_core.exceptions import DeadlineExceeded, GoogleAPICallError
from google.cloud import storage
+from google.cloud.exceptions import NotFound
import pytest
import translate_v3_batch_translate_text_with_glossary_and_model
@@ -37,16 +40,24 @@ def glossary():
yield glossary_id
- try:
- translate_v3_delete_glossary.sample_delete_glossary(PROJECT_ID, glossary_id)
- except Exception:
- pass
+ # clean up
+ @backoff.on_exception(
+ backoff.expo, (DeadlineExceeded, GoogleAPICallError), max_time=60
+ )
+ def delete_glossary():
+ try:
+ translate_v3_delete_glossary.delete_glossary(PROJECT_ID, glossary_id)
+ except NotFound as e:
+ # Ignoring this case.
+ print("Got NotFound, detail: {}".format(str(e)))
+
+ delete_glossary()
@pytest.fixture(scope="function")
def bucket():
"""Create a temporary bucket to store annotation output."""
- bucket_name = "mike-test-delete-" + str(uuid.uuid1())
+ bucket_name = "test-bucket-for-glossary-" + str(uuid.uuid1())
storage_client = storage.Client()
bucket = storage_client.create_bucket(bucket_name)
@@ -58,7 +69,7 @@ def bucket():
def test_batch_translate_text_with_glossary_and_model(capsys, bucket, glossary):
translate_v3_batch_translate_text_with_glossary_and_model.batch_translate_text_with_glossary_and_model(
"gs://cloud-samples-data/translation/text_with_custom_model_and_glossary.txt",
- "gs://{}/translation/BATCH_TRANSLATION_OUTPUT/".format(bucket.name),
+ "gs://{}/translation/BATCH_TRANSLATION_GLOS_MODEL_OUTPUT/".format(bucket.name),
PROJECT_ID,
MODEL_ID,
glossary,
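
The cleanup fixture above retries glossary deletion with exponential backoff. As a rough illustration of that configuration (not code from this change): backoff.expo produces exponentially growing wait targets (1, 2, 4, 8, ... seconds, jittered by default), and max_time=60 gives up and re-raises once about a minute has elapsed.

import backoff
from google.api_core.exceptions import DeadlineExceeded

@backoff.on_exception(backoff.expo, DeadlineExceeded, max_time=60)
def cleanup():
    ...  # e.g. delete a resource that may still be busy
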
diff --git a/samples/snippets/translate_v3_batch_translate_text_with_glossary_test.py b/samples/snippets/translate_v3_batch_translate_text_with_glossary_test.py
index 33a1f829..ffac21a1 100644
--- a/samples/snippets/translate_v3_batch_translate_text_with_glossary_test.py
+++ b/samples/snippets/translate_v3_batch_translate_text_with_glossary_test.py
@@ -70,7 +70,7 @@ def bucket():
def test_batch_translate_text_with_glossary(capsys, bucket, glossary):
translate_v3_batch_translate_text_with_glossary.batch_translate_text_with_glossary(
"gs://cloud-samples-data/translation/text_with_glossary.txt",
- "gs://{}/translation/BATCH_TRANSLATION_OUTPUT/".format(bucket.name),
+ "gs://{}/translation/BATCH_TRANSLATION_GLOS_OUTPUT/".format(bucket.name),
PROJECT_ID,
glossary,
320,
diff --git a/samples/snippets/translate_v3_batch_translate_text_with_model.py b/samples/snippets/translate_v3_batch_translate_text_with_model.py
index 07d967d7..a5b94866 100644
--- a/samples/snippets/translate_v3_batch_translate_text_with_model.py
+++ b/samples/snippets/translate_v3_batch_translate_text_with_model.py
@@ -24,7 +24,7 @@ def batch_translate_text_with_model(
model_id="YOUR_MODEL_ID",
):
"""Batch translate text using Translation model.
- Model can be AutoML or General[built-in] model. """
+ The model can be an AutoML model or a general [built-in] model."""
client = translate.TranslationServiceClient()
diff --git a/samples/snippets/translate_v3_batch_translate_text_with_model_test.py b/samples/snippets/translate_v3_batch_translate_text_with_model_test.py
index f6ad1007..02e99b69 100644
--- a/samples/snippets/translate_v3_batch_translate_text_with_model_test.py
+++ b/samples/snippets/translate_v3_batch_translate_text_with_model_test.py
@@ -40,7 +40,7 @@ def bucket():
def test_batch_translate_text_with_model(capsys, bucket):
translate_v3_batch_translate_text_with_model.batch_translate_text_with_model(
"gs://cloud-samples-data/translation/custom_model_text.txt",
- "gs://{}/translation/BATCH_TRANSLATION_OUTPUT/".format(bucket.name),
+ "gs://{}/translation/BATCH_TRANSLATION_MODEL_OUTPUT/".format(bucket.name),
PROJECT_ID,
MODEL_ID,
)
diff --git a/samples/snippets/translate_v3_delete_glossary.py b/samples/snippets/translate_v3_delete_glossary.py
index 336b7a06..b5c55b2b 100644
--- a/samples/snippets/translate_v3_delete_glossary.py
+++ b/samples/snippets/translate_v3_delete_glossary.py
@@ -17,7 +17,9 @@
def delete_glossary(
- project_id="YOUR_PROJECT_ID", glossary_id="YOUR_GLOSSARY_ID", timeout=180,
+ project_id="YOUR_PROJECT_ID",
+ glossary_id="YOUR_GLOSSARY_ID",
+ timeout=180,
):
"""Delete a specific glossary based on the glossary ID."""
client = translate.TranslationServiceClient()
diff --git a/scripts/fixup_translate_v3_keywords.py b/scripts/fixup_translate_v3_keywords.py
index 0f8aaa48..7bf59c96 100644
--- a/scripts/fixup_translate_v3_keywords.py
+++ b/scripts/fixup_translate_v3_keywords.py
@@ -1,3 +1,4 @@
+#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
diff --git a/scripts/fixup_translate_v3beta1_keywords.py b/scripts/fixup_translate_v3beta1_keywords.py
index 0f8aaa48..6df60492 100644
--- a/scripts/fixup_translate_v3beta1_keywords.py
+++ b/scripts/fixup_translate_v3beta1_keywords.py
@@ -1,3 +1,4 @@
+#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
@@ -40,6 +41,7 @@ def partition(
class translateCallTransformer(cst.CSTTransformer):
CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
+ 'batch_translate_document': ('parent', 'source_language_code', 'target_language_codes', 'input_configs', 'output_config', 'models', 'glossaries', ),
'batch_translate_text': ('parent', 'source_language_code', 'target_language_codes', 'input_configs', 'output_config', 'models', 'glossaries', 'labels', ),
'create_glossary': ('parent', 'glossary', ),
'delete_glossary': ('name', ),
@@ -47,6 +49,7 @@ class translateCallTransformer(cst.CSTTransformer):
'get_glossary': ('name', ),
'get_supported_languages': ('parent', 'display_language_code', 'model', ),
'list_glossaries': ('parent', 'page_size', 'page_token', 'filter', ),
+ 'translate_document': ('parent', 'target_language_code', 'document_input_config', 'source_language_code', 'document_output_config', 'model', 'glossary_config', 'labels', ),
'translate_text': ('contents', 'target_language_code', 'parent', 'mime_type', 'source_language_code', 'model', 'glossary_config', 'labels', ),
}
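
The new METHOD_TO_PARAMS entries let the keyword-fixup script rewrite calls to the added batch_translate_document and translate_document methods as well. A sketch of invoking it, assuming the generated script exposes the usual --input-directory/--output-directory flags (an assumption, not verified in this diff); paths are placeholders.

import subprocess
import sys

subprocess.check_call([
    sys.executable, "scripts/fixup_translate_v3beta1_keywords.py",
    "--input-directory", "old_code/",     # placeholder: code to rewrite
    "--output-directory", "fixed_code/",  # placeholder: destination for rewritten files
])
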
diff --git a/setup.py b/setup.py
index 9045fdcf..d8d11154 100644
--- a/setup.py
+++ b/setup.py
@@ -22,16 +22,15 @@
name = "google-cloud-translate"
description = "Google Cloud Translation API client library"
-version = "3.0.2"
+version = "3.1.0"
# Should be one of:
# 'Development Status :: 3 - Alpha'
# 'Development Status :: 4 - Beta'
# 'Development Status :: 5 - Production/Stable'
release_status = "Development Status :: 5 - Production/Stable"
dependencies = [
- "google-api-core[grpc] >= 1.22.0, < 2.0.0dev",
- "google-cloud-core >= 1.1.0, < 2.0dev",
- "libcst >= 0.2.5",
+ "google-api-core[grpc] >= 1.22.2, < 2.0.0dev",
+ "google-cloud-core >= 1.3.0, < 2.0dev",
"proto-plus >= 0.4.0",
]
extras = {}
diff --git a/synth.metadata b/synth.metadata
index 85cf89ed..429cdcc3 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -4,21 +4,29 @@
"git": {
"name": ".",
"remote": "https://github.com/googleapis/python-translate.git",
- "sha": "2bc6296122e54bc93804d37411ff7554e2808626"
+ "sha": "f41be7adee001acaaa14e892bfbb52b91a336cb0"
+ }
+ },
+ {
+ "git": {
+ "name": "googleapis",
+ "remote": "https://github.com/googleapis/googleapis.git",
+ "sha": "551ddbb55b96147012c00b66250dd5907556807c",
+ "internalRef": "364734171"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "f68649c5f26bcff6817c6d21e90dac0fc71fef8e"
+ "sha": "4501974ad08b5d693311457e2ea4ce845676e329"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "f68649c5f26bcff6817c6d21e90dac0fc71fef8e"
+ "sha": "4501974ad08b5d693311457e2ea4ce845676e329"
}
}
],
@@ -41,5 +49,124 @@
"generator": "bazel"
}
}
+ ],
+ "generatedFiles": [
+ ".coveragerc",
+ ".flake8",
+ ".github/CONTRIBUTING.md",
+ ".github/ISSUE_TEMPLATE/bug_report.md",
+ ".github/ISSUE_TEMPLATE/feature_request.md",
+ ".github/ISSUE_TEMPLATE/support_request.md",
+ ".github/PULL_REQUEST_TEMPLATE.md",
+ ".github/header-checker-lint.yml",
+ ".github/release-please.yml",
+ ".github/snippet-bot.yml",
+ ".gitignore",
+ ".kokoro/build.sh",
+ ".kokoro/continuous/common.cfg",
+ ".kokoro/continuous/continuous.cfg",
+ ".kokoro/docker/docs/Dockerfile",
+ ".kokoro/docker/docs/fetch_gpg_keys.sh",
+ ".kokoro/docs/common.cfg",
+ ".kokoro/docs/docs-presubmit.cfg",
+ ".kokoro/docs/docs.cfg",
+ ".kokoro/populate-secrets.sh",
+ ".kokoro/presubmit/common.cfg",
+ ".kokoro/presubmit/presubmit.cfg",
+ ".kokoro/publish-docs.sh",
+ ".kokoro/release.sh",
+ ".kokoro/release/common.cfg",
+ ".kokoro/release/release.cfg",
+ ".kokoro/samples/lint/common.cfg",
+ ".kokoro/samples/lint/continuous.cfg",
+ ".kokoro/samples/lint/periodic.cfg",
+ ".kokoro/samples/lint/presubmit.cfg",
+ ".kokoro/samples/python3.6/common.cfg",
+ ".kokoro/samples/python3.6/continuous.cfg",
+ ".kokoro/samples/python3.6/periodic-head.cfg",
+ ".kokoro/samples/python3.6/periodic.cfg",
+ ".kokoro/samples/python3.6/presubmit.cfg",
+ ".kokoro/samples/python3.7/common.cfg",
+ ".kokoro/samples/python3.7/continuous.cfg",
+ ".kokoro/samples/python3.7/periodic-head.cfg",
+ ".kokoro/samples/python3.7/periodic.cfg",
+ ".kokoro/samples/python3.7/presubmit.cfg",
+ ".kokoro/samples/python3.8/common.cfg",
+ ".kokoro/samples/python3.8/continuous.cfg",
+ ".kokoro/samples/python3.8/periodic-head.cfg",
+ ".kokoro/samples/python3.8/periodic.cfg",
+ ".kokoro/samples/python3.8/presubmit.cfg",
+ ".kokoro/test-samples-against-head.sh",
+ ".kokoro/test-samples-impl.sh",
+ ".kokoro/test-samples.sh",
+ ".kokoro/trampoline.sh",
+ ".kokoro/trampoline_v2.sh",
+ ".pre-commit-config.yaml",
+ ".trampolinerc",
+ "CODE_OF_CONDUCT.md",
+ "CONTRIBUTING.rst",
+ "LICENSE",
+ "MANIFEST.in",
+ "docs/_static/custom.css",
+ "docs/_templates/layout.html",
+ "docs/conf.py",
+ "docs/multiprocessing.rst",
+ "docs/translate_v3/services.rst",
+ "docs/translate_v3/translation_service.rst",
+ "docs/translate_v3/types.rst",
+ "docs/translate_v3beta1/services.rst",
+ "docs/translate_v3beta1/translation_service.rst",
+ "docs/translate_v3beta1/types.rst",
+ "google/cloud/translate/__init__.py",
+ "google/cloud/translate/py.typed",
+ "google/cloud/translate_v3/__init__.py",
+ "google/cloud/translate_v3/py.typed",
+ "google/cloud/translate_v3/services/__init__.py",
+ "google/cloud/translate_v3/services/translation_service/__init__.py",
+ "google/cloud/translate_v3/services/translation_service/async_client.py",
+ "google/cloud/translate_v3/services/translation_service/client.py",
+ "google/cloud/translate_v3/services/translation_service/pagers.py",
+ "google/cloud/translate_v3/services/translation_service/transports/__init__.py",
+ "google/cloud/translate_v3/services/translation_service/transports/base.py",
+ "google/cloud/translate_v3/services/translation_service/transports/grpc.py",
+ "google/cloud/translate_v3/services/translation_service/transports/grpc_asyncio.py",
+ "google/cloud/translate_v3/types/__init__.py",
+ "google/cloud/translate_v3/types/translation_service.py",
+ "google/cloud/translate_v3beta1/__init__.py",
+ "google/cloud/translate_v3beta1/py.typed",
+ "google/cloud/translate_v3beta1/services/__init__.py",
+ "google/cloud/translate_v3beta1/services/translation_service/__init__.py",
+ "google/cloud/translate_v3beta1/services/translation_service/async_client.py",
+ "google/cloud/translate_v3beta1/services/translation_service/client.py",
+ "google/cloud/translate_v3beta1/services/translation_service/pagers.py",
+ "google/cloud/translate_v3beta1/services/translation_service/transports/__init__.py",
+ "google/cloud/translate_v3beta1/services/translation_service/transports/base.py",
+ "google/cloud/translate_v3beta1/services/translation_service/transports/grpc.py",
+ "google/cloud/translate_v3beta1/services/translation_service/transports/grpc_asyncio.py",
+ "google/cloud/translate_v3beta1/types/__init__.py",
+ "google/cloud/translate_v3beta1/types/translation_service.py",
+ "mypy.ini",
+ "noxfile.py",
+ "renovate.json",
+ "samples/AUTHORING_GUIDE.md",
+ "samples/CONTRIBUTING.md",
+ "samples/snippets/hybrid_glossaries/noxfile.py",
+ "samples/snippets/noxfile.py",
+ "scripts/decrypt-secrets.sh",
+ "scripts/fixup_translate_v3_keywords.py",
+ "scripts/fixup_translate_v3beta1_keywords.py",
+ "scripts/readme-gen/readme_gen.py",
+ "scripts/readme-gen/templates/README.tmpl.rst",
+ "scripts/readme-gen/templates/auth.tmpl.rst",
+ "scripts/readme-gen/templates/auth_api_key.tmpl.rst",
+ "scripts/readme-gen/templates/install_deps.tmpl.rst",
+ "scripts/readme-gen/templates/install_portaudio.tmpl.rst",
+ "setup.cfg",
+ "testing/.gitignore",
+ "tests/unit/gapic/translate_v3/__init__.py",
+ "tests/unit/gapic/translate_v3/test_translation_service.py",
+ "tests/unit/gapic/translate_v3beta1/__init__.py",
+ "tests/unit/gapic/translate_v3beta1/test_translation_service.py",
+ "translation-v3-py.tar.gz"
]
}
\ No newline at end of file
diff --git a/synth.py b/synth.py
index 8ca9bf33..71f203b4 100644
--- a/synth.py
+++ b/synth.py
@@ -62,6 +62,13 @@
# Correct namespace in noxfile
s.replace("noxfile.py", "google.cloud.translation", "google.cloud.translate")
+# Exclude the v2 from coverage targets
+s.replace(".coveragerc",
+" google/cloud/translate/__init__.py",
+""" google/cloud/translate/__init__.py
+ google/cloud/translate_v2/__init__.py
+ google/cloud/__init__.py"""
+)
# ----------------------------------------------------------------------------
# Samples templates
# ----------------------------------------------------------------------------
diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt
new file mode 100644
index 00000000..d698348c
--- /dev/null
+++ b/testing/constraints-3.6.txt
@@ -0,0 +1,10 @@
+# This constraints file is used to check that lower bounds
+# are correct in setup.py
+# List *all* library dependencies and extras in this file.
+# Pin the version to the lower bound.
+#
+# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev",
+# Then this file should have foo==1.14.0
+google-api-core==1.22.2
+google-cloud-core==1.3.0
+proto-plus==0.4.0
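
This constraints file pairs with the noxfile change above: passing it to pip via -c makes the Python 3.6 test sessions install exactly these lower-bound versions, so a too-low bound in setup.py fails fast. A rough Python equivalent of the constrained install the session performs (run from the repository root):

import subprocess
import sys

subprocess.check_call([
    sys.executable, "-m", "pip", "install",
    "-e", ".",                            # the library under test
    "-c", "testing/constraints-3.6.txt",  # pin dependencies to declared lower bounds
])
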
diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/unit/gapic/translate_v3/__init__.py b/tests/unit/gapic/translate_v3/__init__.py
index 8b137891..42ffdf2b 100644
--- a/tests/unit/gapic/translate_v3/__init__.py
+++ b/tests/unit/gapic/translate_v3/__init__.py
@@ -1 +1,16 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/tests/unit/gapic/translate_v3/test_translation_service.py b/tests/unit/gapic/translate_v3/test_translation_service.py
index c296227c..6950ea3c 100644
--- a/tests/unit/gapic/translate_v3/test_translation_service.py
+++ b/tests/unit/gapic/translate_v3/test_translation_service.py
@@ -95,7 +95,24 @@ def test__get_default_mtls_endpoint():
@pytest.mark.parametrize(
- "client_class", [TranslationServiceClient, TranslationServiceAsyncClient]
+ "client_class", [TranslationServiceClient, TranslationServiceAsyncClient,]
+)
+def test_translation_service_client_from_service_account_info(client_class):
+ creds = credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_info"
+ ) as factory:
+ factory.return_value = creds
+ info = {"valid": True}
+ client = client_class.from_service_account_info(info)
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == "translate.googleapis.com:443"
+
+
+@pytest.mark.parametrize(
+ "client_class", [TranslationServiceClient, TranslationServiceAsyncClient,]
)
def test_translation_service_client_from_service_account_file(client_class):
creds = credentials.AnonymousCredentials()
@@ -104,17 +121,22 @@ def test_translation_service_client_from_service_account_file(client_class):
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
- assert client._transport._credentials == creds
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
client = client_class.from_service_account_json("dummy/file/path.json")
- assert client._transport._credentials == creds
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
- assert client._transport._host == "translate.googleapis.com:443"
+ assert client.transport._host == "translate.googleapis.com:443"
def test_translation_service_client_get_transport_class():
transport = TranslationServiceClient.get_transport_class()
- assert transport == transports.TranslationServiceGrpcTransport
+ available_transports = [
+ transports.TranslationServiceGrpcTransport,
+ ]
+ assert transport in available_transports
transport = TranslationServiceClient.get_transport_class("grpc")
assert transport == transports.TranslationServiceGrpcTransport
@@ -165,7 +187,7 @@ def test_translation_service_client_client_options(
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -181,7 +203,7 @@ def test_translation_service_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -197,7 +219,7 @@ def test_translation_service_client_client_options(
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -225,7 +247,7 @@ def test_translation_service_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -286,29 +308,25 @@ def test_translation_service_client_mtls_env_auto(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
- ssl_channel_creds = mock.Mock()
- with mock.patch(
- "grpc.ssl_channel_credentials", return_value=ssl_channel_creds
- ):
- patched.return_value = None
- client = client_class(client_options=options)
+ patched.return_value = None
+ client = client_class(client_options=options)
- if use_client_cert_env == "false":
- expected_ssl_channel_creds = None
- expected_host = client.DEFAULT_ENDPOINT
- else:
- expected_ssl_channel_creds = ssl_channel_creds
- expected_host = client.DEFAULT_MTLS_ENDPOINT
+ if use_client_cert_env == "false":
+ expected_client_cert_source = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_client_cert_source = client_cert_source_callback
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- ssl_channel_credentials=expected_ssl_channel_creds,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
@@ -317,66 +335,53 @@ def test_translation_service_client_mtls_env_auto(
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
- "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=True,
):
with mock.patch(
- "google.auth.transport.grpc.SslCredentials.is_mtls",
- new_callable=mock.PropertyMock,
- ) as is_mtls_mock:
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.ssl_credentials",
- new_callable=mock.PropertyMock,
- ) as ssl_credentials_mock:
- if use_client_cert_env == "false":
- is_mtls_mock.return_value = False
- ssl_credentials_mock.return_value = None
- expected_host = client.DEFAULT_ENDPOINT
- expected_ssl_channel_creds = None
- else:
- is_mtls_mock.return_value = True
- ssl_credentials_mock.return_value = mock.Mock()
- expected_host = client.DEFAULT_MTLS_ENDPOINT
- expected_ssl_channel_creds = (
- ssl_credentials_mock.return_value
- )
-
- patched.return_value = None
- client = client_class()
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- ssl_channel_credentials=expected_ssl_channel_creds,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
+ "google.auth.transport.mtls.default_client_cert_source",
+ return_value=client_cert_source_callback,
+ ):
+ if use_client_cert_env == "false":
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_client_cert_source = None
+ else:
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_client_cert_source = client_cert_source_callback
- # Check the case client_cert_source and ADC client cert are not provided.
- with mock.patch.dict(
- os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
- ):
- with mock.patch.object(transport_class, "__init__") as patched:
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
- ):
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.is_mtls",
- new_callable=mock.PropertyMock,
- ) as is_mtls_mock:
- is_mtls_mock.return_value = False
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=expected_host,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=False,
+ ):
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
@@ -402,7 +407,7 @@ def test_translation_service_client_client_options_scopes(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -432,7 +437,7 @@ def test_translation_service_client_client_options_credentials_file(
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -451,7 +456,7 @@ def test_translation_service_client_client_options_from_dict():
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -469,7 +474,7 @@ def test_translate_text(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.translate_text), "__call__") as call:
+ with mock.patch.object(type(client.transport.translate_text), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.TranslateTextResponse()
@@ -482,6 +487,7 @@ def test_translate_text(
assert args[0] == translation_service.TranslateTextRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, translation_service.TranslateTextResponse)
@@ -489,20 +495,37 @@ def test_translate_text_from_dict():
test_translate_text(request_type=dict)
+def test_translate_text_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = TranslationServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.translate_text), "__call__") as call:
+ client.translate_text()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == translation_service.TranslateTextRequest()
+
+
@pytest.mark.asyncio
-async def test_translate_text_async(transport: str = "grpc_asyncio"):
+async def test_translate_text_async(
+ transport: str = "grpc_asyncio",
+ request_type=translation_service.TranslateTextRequest,
+):
client = TranslationServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = translation_service.TranslateTextRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.translate_text), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.translate_text), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
translation_service.TranslateTextResponse()
@@ -514,12 +537,17 @@ async def test_translate_text_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == translation_service.TranslateTextRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, translation_service.TranslateTextResponse)
+@pytest.mark.asyncio
+async def test_translate_text_async_from_dict():
+ await test_translate_text_async(request_type=dict)
+
+
def test_translate_text_field_headers():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -529,7 +557,7 @@ def test_translate_text_field_headers():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.translate_text), "__call__") as call:
+ with mock.patch.object(type(client.transport.translate_text), "__call__") as call:
call.return_value = translation_service.TranslateTextResponse()
client.translate_text(request)
@@ -556,9 +584,7 @@ async def test_translate_text_field_headers_async():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.translate_text), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.translate_text), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
translation_service.TranslateTextResponse()
)
@@ -579,7 +605,7 @@ def test_translate_text_flattened():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.translate_text), "__call__") as call:
+ with mock.patch.object(type(client.transport.translate_text), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.TranslateTextResponse()
@@ -636,9 +662,7 @@ async def test_translate_text_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.translate_text), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.translate_text), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.TranslateTextResponse()
@@ -706,7 +730,7 @@ def test_detect_language(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.detect_language), "__call__") as call:
+ with mock.patch.object(type(client.transport.detect_language), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.DetectLanguageResponse()
@@ -719,6 +743,7 @@ def test_detect_language(
assert args[0] == translation_service.DetectLanguageRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, translation_service.DetectLanguageResponse)
@@ -726,20 +751,37 @@ def test_detect_language_from_dict():
test_detect_language(request_type=dict)
+def test_detect_language_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = TranslationServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.detect_language), "__call__") as call:
+ client.detect_language()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == translation_service.DetectLanguageRequest()
+
+
@pytest.mark.asyncio
-async def test_detect_language_async(transport: str = "grpc_asyncio"):
+async def test_detect_language_async(
+ transport: str = "grpc_asyncio",
+ request_type=translation_service.DetectLanguageRequest,
+):
client = TranslationServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = translation_service.DetectLanguageRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.detect_language), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.detect_language), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
translation_service.DetectLanguageResponse()
@@ -751,12 +793,17 @@ async def test_detect_language_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == translation_service.DetectLanguageRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, translation_service.DetectLanguageResponse)
+@pytest.mark.asyncio
+async def test_detect_language_async_from_dict():
+ await test_detect_language_async(request_type=dict)
+
+
def test_detect_language_field_headers():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -766,7 +813,7 @@ def test_detect_language_field_headers():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.detect_language), "__call__") as call:
+ with mock.patch.object(type(client.transport.detect_language), "__call__") as call:
call.return_value = translation_service.DetectLanguageResponse()
client.detect_language(request)
@@ -793,9 +840,7 @@ async def test_detect_language_field_headers_async():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.detect_language), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.detect_language), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
translation_service.DetectLanguageResponse()
)
@@ -816,7 +861,7 @@ def test_detect_language_flattened():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.detect_language), "__call__") as call:
+ with mock.patch.object(type(client.transport.detect_language), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.DetectLanguageResponse()
@@ -865,9 +910,7 @@ async def test_detect_language_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.detect_language), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.detect_language), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.DetectLanguageResponse()
@@ -929,7 +972,7 @@ def test_get_supported_languages(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.get_supported_languages), "__call__"
+ type(client.transport.get_supported_languages), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.SupportedLanguages()
@@ -943,6 +986,7 @@ def test_get_supported_languages(
assert args[0] == translation_service.GetSupportedLanguagesRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, translation_service.SupportedLanguages)
@@ -950,19 +994,40 @@ def test_get_supported_languages_from_dict():
test_get_supported_languages(request_type=dict)
+def test_get_supported_languages_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = TranslationServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_supported_languages), "__call__"
+ ) as call:
+ client.get_supported_languages()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == translation_service.GetSupportedLanguagesRequest()
+
+
@pytest.mark.asyncio
-async def test_get_supported_languages_async(transport: str = "grpc_asyncio"):
+async def test_get_supported_languages_async(
+ transport: str = "grpc_asyncio",
+ request_type=translation_service.GetSupportedLanguagesRequest,
+):
client = TranslationServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = translation_service.GetSupportedLanguagesRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.get_supported_languages), "__call__"
+ type(client.transport.get_supported_languages), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -975,12 +1040,17 @@ async def test_get_supported_languages_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == translation_service.GetSupportedLanguagesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, translation_service.SupportedLanguages)
+@pytest.mark.asyncio
+async def test_get_supported_languages_async_from_dict():
+ await test_get_supported_languages_async(request_type=dict)
+
+
def test_get_supported_languages_field_headers():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -991,7 +1061,7 @@ def test_get_supported_languages_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.get_supported_languages), "__call__"
+ type(client.transport.get_supported_languages), "__call__"
) as call:
call.return_value = translation_service.SupportedLanguages()
@@ -1020,7 +1090,7 @@ async def test_get_supported_languages_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.get_supported_languages), "__call__"
+ type(client.transport.get_supported_languages), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
translation_service.SupportedLanguages()
@@ -1043,7 +1113,7 @@ def test_get_supported_languages_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.get_supported_languages), "__call__"
+ type(client.transport.get_supported_languages), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.SupportedLanguages()
@@ -1090,7 +1160,7 @@ async def test_get_supported_languages_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.get_supported_languages), "__call__"
+ type(client.transport.get_supported_languages), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.SupportedLanguages()
@@ -1148,7 +1218,7 @@ def test_batch_translate_text(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.batch_translate_text), "__call__"
+ type(client.transport.batch_translate_text), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
@@ -1169,19 +1239,40 @@ def test_batch_translate_text_from_dict():
test_batch_translate_text(request_type=dict)
+def test_batch_translate_text_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = TranslationServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.batch_translate_text), "__call__"
+ ) as call:
+ client.batch_translate_text()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == translation_service.BatchTranslateTextRequest()
+
+
@pytest.mark.asyncio
-async def test_batch_translate_text_async(transport: str = "grpc_asyncio"):
+async def test_batch_translate_text_async(
+ transport: str = "grpc_asyncio",
+ request_type=translation_service.BatchTranslateTextRequest,
+):
client = TranslationServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = translation_service.BatchTranslateTextRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.batch_translate_text), "__call__"
+ type(client.transport.batch_translate_text), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -1194,12 +1285,17 @@ async def test_batch_translate_text_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == translation_service.BatchTranslateTextRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+@pytest.mark.asyncio
+async def test_batch_translate_text_async_from_dict():
+ await test_batch_translate_text_async(request_type=dict)
+
+
def test_batch_translate_text_field_headers():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1210,7 +1306,7 @@ def test_batch_translate_text_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.batch_translate_text), "__call__"
+ type(client.transport.batch_translate_text), "__call__"
) as call:
call.return_value = operations_pb2.Operation(name="operations/op")
@@ -1239,7 +1335,7 @@ async def test_batch_translate_text_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.batch_translate_text), "__call__"
+ type(client.transport.batch_translate_text), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
@@ -1269,7 +1365,7 @@ def test_create_glossary(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.create_glossary), "__call__") as call:
+ with mock.patch.object(type(client.transport.create_glossary), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
@@ -1289,20 +1385,37 @@ def test_create_glossary_from_dict():
test_create_glossary(request_type=dict)
+def test_create_glossary_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = TranslationServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.create_glossary), "__call__") as call:
+ client.create_glossary()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == translation_service.CreateGlossaryRequest()
+
+
@pytest.mark.asyncio
-async def test_create_glossary_async(transport: str = "grpc_asyncio"):
+async def test_create_glossary_async(
+ transport: str = "grpc_asyncio",
+ request_type=translation_service.CreateGlossaryRequest,
+):
client = TranslationServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = translation_service.CreateGlossaryRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.create_glossary), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.create_glossary), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
@@ -1314,12 +1427,17 @@ async def test_create_glossary_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == translation_service.CreateGlossaryRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+@pytest.mark.asyncio
+async def test_create_glossary_async_from_dict():
+ await test_create_glossary_async(request_type=dict)
+
+
def test_create_glossary_field_headers():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1329,7 +1447,7 @@ def test_create_glossary_field_headers():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.create_glossary), "__call__") as call:
+ with mock.patch.object(type(client.transport.create_glossary), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.create_glossary(request)
@@ -1356,9 +1474,7 @@ async def test_create_glossary_field_headers_async():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.create_glossary), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.create_glossary), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
@@ -1379,7 +1495,7 @@ def test_create_glossary_flattened():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.create_glossary), "__call__") as call:
+ with mock.patch.object(type(client.transport.create_glossary), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
@@ -1420,9 +1536,7 @@ async def test_create_glossary_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.create_glossary), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.create_glossary), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
@@ -1474,7 +1588,7 @@ def test_list_glossaries(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_glossaries), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.ListGlossariesResponse(
next_page_token="next_page_token_value",
@@ -1489,6 +1603,7 @@ def test_list_glossaries(
assert args[0] == translation_service.ListGlossariesRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, pagers.ListGlossariesPager)
assert response.next_page_token == "next_page_token_value"
@@ -1498,20 +1613,37 @@ def test_list_glossaries_from_dict():
test_list_glossaries(request_type=dict)
+def test_list_glossaries_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = TranslationServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call:
+ client.list_glossaries()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == translation_service.ListGlossariesRequest()
+
+
@pytest.mark.asyncio
-async def test_list_glossaries_async(transport: str = "grpc_asyncio"):
+async def test_list_glossaries_async(
+ transport: str = "grpc_asyncio",
+ request_type=translation_service.ListGlossariesRequest,
+):
client = TranslationServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = translation_service.ListGlossariesRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_glossaries), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
translation_service.ListGlossariesResponse(
@@ -1525,7 +1657,7 @@ async def test_list_glossaries_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == translation_service.ListGlossariesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListGlossariesAsyncPager)
@@ -1533,6 +1665,11 @@ async def test_list_glossaries_async(transport: str = "grpc_asyncio"):
assert response.next_page_token == "next_page_token_value"
+@pytest.mark.asyncio
+async def test_list_glossaries_async_from_dict():
+ await test_list_glossaries_async(request_type=dict)
+
+
def test_list_glossaries_field_headers():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1542,7 +1679,7 @@ def test_list_glossaries_field_headers():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_glossaries), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call:
call.return_value = translation_service.ListGlossariesResponse()
client.list_glossaries(request)
@@ -1569,9 +1706,7 @@ async def test_list_glossaries_field_headers_async():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_glossaries), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
translation_service.ListGlossariesResponse()
)
@@ -1592,7 +1727,7 @@ def test_list_glossaries_flattened():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_glossaries), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.ListGlossariesResponse()
@@ -1626,9 +1761,7 @@ async def test_list_glossaries_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_glossaries), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.ListGlossariesResponse()
@@ -1665,7 +1798,7 @@ def test_list_glossaries_pager():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_glossaries), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
translation_service.ListGlossariesResponse(
@@ -1708,7 +1841,7 @@ def test_list_glossaries_pages():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_glossaries), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
translation_service.ListGlossariesResponse(
@@ -1746,9 +1879,7 @@ async def test_list_glossaries_async_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_glossaries),
- "__call__",
- new_callable=mock.AsyncMock,
+ type(client.transport.list_glossaries), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -1792,9 +1923,7 @@ async def test_list_glossaries_async_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_glossaries),
- "__call__",
- new_callable=mock.AsyncMock,
+ type(client.transport.list_glossaries), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -1839,7 +1968,7 @@ def test_get_glossary(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_glossary), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_glossary), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.Glossary(
name="name_value",
@@ -1858,6 +1987,7 @@ def test_get_glossary(
assert args[0] == translation_service.GetGlossaryRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, translation_service.Glossary)
assert response.name == "name_value"
@@ -1869,20 +1999,36 @@ def test_get_glossary_from_dict():
test_get_glossary(request_type=dict)
+def test_get_glossary_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = TranslationServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_glossary), "__call__") as call:
+ client.get_glossary()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == translation_service.GetGlossaryRequest()
+
+
@pytest.mark.asyncio
-async def test_get_glossary_async(transport: str = "grpc_asyncio"):
+async def test_get_glossary_async(
+ transport: str = "grpc_asyncio", request_type=translation_service.GetGlossaryRequest
+):
client = TranslationServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = translation_service.GetGlossaryRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_glossary), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_glossary), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
translation_service.Glossary(name="name_value", entry_count=1210,)
@@ -1894,7 +2040,7 @@ async def test_get_glossary_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == translation_service.GetGlossaryRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, translation_service.Glossary)
@@ -1904,6 +2050,11 @@ async def test_get_glossary_async(transport: str = "grpc_asyncio"):
assert response.entry_count == 1210
+@pytest.mark.asyncio
+async def test_get_glossary_async_from_dict():
+ await test_get_glossary_async(request_type=dict)
+
+
def test_get_glossary_field_headers():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1913,7 +2064,7 @@ def test_get_glossary_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_glossary), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_glossary), "__call__") as call:
call.return_value = translation_service.Glossary()
client.get_glossary(request)
@@ -1940,9 +2091,7 @@ async def test_get_glossary_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_glossary), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_glossary), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
translation_service.Glossary()
)
@@ -1963,7 +2112,7 @@ def test_get_glossary_flattened():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_glossary), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_glossary), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.Glossary()
@@ -1997,9 +2146,7 @@ async def test_get_glossary_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_glossary), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_glossary), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.Glossary()
@@ -2044,7 +2191,7 @@ def test_delete_glossary(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.delete_glossary), "__call__") as call:
+ with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
@@ -2064,20 +2211,37 @@ def test_delete_glossary_from_dict():
test_delete_glossary(request_type=dict)
+def test_delete_glossary_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = TranslationServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call:
+ client.delete_glossary()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == translation_service.DeleteGlossaryRequest()
+
+
@pytest.mark.asyncio
-async def test_delete_glossary_async(transport: str = "grpc_asyncio"):
+async def test_delete_glossary_async(
+ transport: str = "grpc_asyncio",
+ request_type=translation_service.DeleteGlossaryRequest,
+):
client = TranslationServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = translation_service.DeleteGlossaryRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.delete_glossary), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
@@ -2089,12 +2253,17 @@ async def test_delete_glossary_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == translation_service.DeleteGlossaryRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+@pytest.mark.asyncio
+async def test_delete_glossary_async_from_dict():
+ await test_delete_glossary_async(request_type=dict)
+
+
def test_delete_glossary_field_headers():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -2104,7 +2273,7 @@ def test_delete_glossary_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.delete_glossary), "__call__") as call:
+ with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.delete_glossary(request)
@@ -2131,9 +2300,7 @@ async def test_delete_glossary_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.delete_glossary), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
@@ -2154,7 +2321,7 @@ def test_delete_glossary_flattened():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.delete_glossary), "__call__") as call:
+ with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
@@ -2188,9 +2355,7 @@ async def test_delete_glossary_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.delete_glossary), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
@@ -2259,7 +2424,7 @@ def test_transport_instance():
credentials=credentials.AnonymousCredentials(),
)
client = TranslationServiceClient(transport=transport)
- assert client._transport is transport
+ assert client.transport is transport
def test_transport_get_channel():
@@ -2295,7 +2460,7 @@ def test_transport_adc(transport_class):
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
- assert isinstance(client._transport, transports.TranslationServiceGrpcTransport,)
+ assert isinstance(client.transport, transports.TranslationServiceGrpcTransport,)
def test_translation_service_base_transport_error():
@@ -2403,6 +2568,56 @@ def test_translation_service_transport_auth_adc():
)
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.TranslationServiceGrpcTransport,
+ transports.TranslationServiceGrpcAsyncIOTransport,
+ ],
+)
+def test_translation_service_grpc_transport_client_cert_source_for_mtls(
+ transport_class,
+):
+ cred = credentials.AnonymousCredentials()
+
+ # Check ssl_channel_credentials is used if provided.
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+ mock_ssl_channel_creds = mock.Mock()
+ transport_class(
+ host="squid.clam.whelk",
+ credentials=cred,
+ ssl_channel_credentials=mock_ssl_channel_creds,
+ )
+ mock_create_channel.assert_called_once_with(
+ "squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=(
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/cloud-translation",
+ ),
+ ssl_credentials=mock_ssl_channel_creds,
+ quota_project_id=None,
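+ # -1 lifts gRPC's default limits on message size.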
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Check that client_cert_source_for_mtls is used when ssl_channel_credentials
+ # is not provided.
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+ with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+ transport_class(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback,
+ )
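+ # The callback returns a (cert, key) pair, which the transport should pass
+ # to grpc.ssl_channel_credentials as certificate_chain and private_key.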
+ expected_cert, expected_key = client_cert_source_callback()
+ mock_ssl_cred.assert_called_once_with(
+ certificate_chain=expected_cert, private_key=expected_key
+ )
+
+
def test_translation_service_host_no_port():
client = TranslationServiceClient(
credentials=credentials.AnonymousCredentials(),
@@ -2410,7 +2625,7 @@ def test_translation_service_host_no_port():
api_endpoint="translate.googleapis.com"
),
)
- assert client._transport._host == "translate.googleapis.com:443"
+ assert client.transport._host == "translate.googleapis.com:443"
def test_translation_service_host_with_port():
@@ -2420,11 +2635,11 @@ def test_translation_service_host_with_port():
api_endpoint="translate.googleapis.com:8000"
),
)
- assert client._transport._host == "translate.googleapis.com:8000"
+ assert client.transport._host == "translate.googleapis.com:8000"
def test_translation_service_grpc_transport_channel():
- channel = grpc.insecure_channel("http://localhost/")
+ channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.TranslationServiceGrpcTransport(
@@ -2432,10 +2647,11 @@ def test_translation_service_grpc_transport_channel():
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
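+ # No TLS channel credentials are recorded when a pre-built channel is supplied.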
+ assert transport._ssl_channel_credentials is None
def test_translation_service_grpc_asyncio_transport_channel():
- channel = aio.insecure_channel("http://localhost/")
+ channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.TranslationServiceGrpcAsyncIOTransport(
@@ -2443,8 +2659,11 @@ def test_translation_service_grpc_asyncio_transport_channel():
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[
@@ -2459,7 +2678,7 @@ def test_translation_service_transport_channel_mtls_with_client_cert_source(
"grpc.ssl_channel_credentials", autospec=True
) as grpc_ssl_channel_cred:
with mock.patch.object(
- transport_class, "create_channel", autospec=True
+ transport_class, "create_channel"
) as grpc_create_channel:
mock_ssl_cred = mock.Mock()
grpc_ssl_channel_cred.return_value = mock_ssl_cred
@@ -2491,10 +2710,17 @@ def test_translation_service_transport_channel_mtls_with_client_cert_source(
),
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
)
assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == mock_ssl_cred
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[
@@ -2510,7 +2736,7 @@ def test_translation_service_transport_channel_mtls_with_adc(transport_class):
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
with mock.patch.object(
- transport_class, "create_channel", autospec=True
+ transport_class, "create_channel"
) as grpc_create_channel:
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
@@ -2534,6 +2760,10 @@ def test_translation_service_transport_channel_mtls_with_adc(transport_class):
),
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
)
assert transport.grpc_channel == mock_grpc_channel
@@ -2542,7 +2772,7 @@ def test_translation_service_grpc_lro_client():
client = TranslationServiceClient(
credentials=credentials.AnonymousCredentials(), transport="grpc",
)
- transport = client._transport
+ transport = client.transport
# Ensure that we have a api-core operations client.
assert isinstance(transport.operations_client, operations_v1.OperationsClient,)
@@ -2555,7 +2785,7 @@ def test_translation_service_grpc_lro_async_client():
client = TranslationServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio",
)
- transport = client._client._transport
+ transport = client.transport
# Ensure that we have a api-core operations client.
assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,)
@@ -2589,6 +2819,107 @@ def test_parse_glossary_path():
assert expected == actual
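+ # The common_*_path helpers render canonical resource names (for example,
+ # "billingAccounts/{billing_account}"), and the parse_* helpers invert them.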
+def test_common_billing_account_path():
+ billing_account = "cuttlefish"
+
+ expected = "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+ actual = TranslationServiceClient.common_billing_account_path(billing_account)
+ assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+ expected = {
+ "billing_account": "mussel",
+ }
+ path = TranslationServiceClient.common_billing_account_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = TranslationServiceClient.parse_common_billing_account_path(path)
+ assert expected == actual
+
+
+def test_common_folder_path():
+ folder = "winkle"
+
+ expected = "folders/{folder}".format(folder=folder,)
+ actual = TranslationServiceClient.common_folder_path(folder)
+ assert expected == actual
+
+
+def test_parse_common_folder_path():
+ expected = {
+ "folder": "nautilus",
+ }
+ path = TranslationServiceClient.common_folder_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = TranslationServiceClient.parse_common_folder_path(path)
+ assert expected == actual
+
+
+def test_common_organization_path():
+ organization = "scallop"
+
+ expected = "organizations/{organization}".format(organization=organization,)
+ actual = TranslationServiceClient.common_organization_path(organization)
+ assert expected == actual
+
+
+def test_parse_common_organization_path():
+ expected = {
+ "organization": "abalone",
+ }
+ path = TranslationServiceClient.common_organization_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = TranslationServiceClient.parse_common_organization_path(path)
+ assert expected == actual
+
+
+def test_common_project_path():
+ project = "squid"
+
+ expected = "projects/{project}".format(project=project,)
+ actual = TranslationServiceClient.common_project_path(project)
+ assert expected == actual
+
+
+def test_parse_common_project_path():
+ expected = {
+ "project": "clam",
+ }
+ path = TranslationServiceClient.common_project_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = TranslationServiceClient.parse_common_project_path(path)
+ assert expected == actual
+
+
+def test_common_location_path():
+ project = "whelk"
+ location = "octopus"
+
+ expected = "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+ actual = TranslationServiceClient.common_location_path(project, location)
+ assert expected == actual
+
+
+def test_parse_common_location_path():
+ expected = {
+ "project": "oyster",
+ "location": "nudibranch",
+ }
+ path = TranslationServiceClient.common_location_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = TranslationServiceClient.parse_common_location_path(path)
+ assert expected == actual
+
+
def test_client_withDEFAULT_CLIENT_INFO():
client_info = gapic_v1.client_info.ClientInfo()
diff --git a/tests/unit/gapic/translate_v3beta1/__init__.py b/tests/unit/gapic/translate_v3beta1/__init__.py
index 8b137891..42ffdf2b 100644
--- a/tests/unit/gapic/translate_v3beta1/__init__.py
+++ b/tests/unit/gapic/translate_v3beta1/__init__.py
@@ -1 +1,16 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/tests/unit/gapic/translate_v3beta1/test_translation_service.py b/tests/unit/gapic/translate_v3beta1/test_translation_service.py
index 5af7b5c2..15060d0e 100644
--- a/tests/unit/gapic/translate_v3beta1/test_translation_service.py
+++ b/tests/unit/gapic/translate_v3beta1/test_translation_service.py
@@ -95,7 +95,24 @@ def test__get_default_mtls_endpoint():
@pytest.mark.parametrize(
- "client_class", [TranslationServiceClient, TranslationServiceAsyncClient]
+ "client_class", [TranslationServiceClient, TranslationServiceAsyncClient,]
+)
+def test_translation_service_client_from_service_account_info(client_class):
+ creds = credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_info"
+ ) as factory:
+ factory.return_value = creds
+ info = {"valid": True}
+ client = client_class.from_service_account_info(info)
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == "translate.googleapis.com:443"
+
+
+@pytest.mark.parametrize(
+ "client_class", [TranslationServiceClient, TranslationServiceAsyncClient,]
)
def test_translation_service_client_from_service_account_file(client_class):
creds = credentials.AnonymousCredentials()
@@ -104,17 +121,22 @@ def test_translation_service_client_from_service_account_file(client_class):
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
- assert client._transport._credentials == creds
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
client = client_class.from_service_account_json("dummy/file/path.json")
- assert client._transport._credentials == creds
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
- assert client._transport._host == "translate.googleapis.com:443"
+ assert client.transport._host == "translate.googleapis.com:443"
def test_translation_service_client_get_transport_class():
transport = TranslationServiceClient.get_transport_class()
- assert transport == transports.TranslationServiceGrpcTransport
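+ # The default transport must be one of the supported gRPC transports.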
+ available_transports = [
+ transports.TranslationServiceGrpcTransport,
+ ]
+ assert transport in available_transports
transport = TranslationServiceClient.get_transport_class("grpc")
assert transport == transports.TranslationServiceGrpcTransport
@@ -165,7 +187,7 @@ def test_translation_service_client_client_options(
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -181,7 +203,7 @@ def test_translation_service_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -197,7 +219,7 @@ def test_translation_service_client_client_options(
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -225,7 +247,7 @@ def test_translation_service_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -286,29 +308,25 @@ def test_translation_service_client_mtls_env_auto(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
- ssl_channel_creds = mock.Mock()
- with mock.patch(
- "grpc.ssl_channel_credentials", return_value=ssl_channel_creds
- ):
- patched.return_value = None
- client = client_class(client_options=options)
+ patched.return_value = None
+ client = client_class(client_options=options)
- if use_client_cert_env == "false":
- expected_ssl_channel_creds = None
- expected_host = client.DEFAULT_ENDPOINT
- else:
- expected_ssl_channel_creds = ssl_channel_creds
- expected_host = client.DEFAULT_MTLS_ENDPOINT
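+ # When GOOGLE_API_USE_CLIENT_CERTIFICATE is "false", the supplied cert source
+ # is ignored and the default (non-mTLS) endpoint is expected.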
+ if use_client_cert_env == "false":
+ expected_client_cert_source = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_client_cert_source = client_cert_source_callback
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- ssl_channel_credentials=expected_ssl_channel_creds,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
@@ -317,66 +335,53 @@ def test_translation_service_client_mtls_env_auto(
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
- "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=True,
):
with mock.patch(
- "google.auth.transport.grpc.SslCredentials.is_mtls",
- new_callable=mock.PropertyMock,
- ) as is_mtls_mock:
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.ssl_credentials",
- new_callable=mock.PropertyMock,
- ) as ssl_credentials_mock:
- if use_client_cert_env == "false":
- is_mtls_mock.return_value = False
- ssl_credentials_mock.return_value = None
- expected_host = client.DEFAULT_ENDPOINT
- expected_ssl_channel_creds = None
- else:
- is_mtls_mock.return_value = True
- ssl_credentials_mock.return_value = mock.Mock()
- expected_host = client.DEFAULT_MTLS_ENDPOINT
- expected_ssl_channel_creds = (
- ssl_credentials_mock.return_value
- )
-
- patched.return_value = None
- client = client_class()
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- ssl_channel_credentials=expected_ssl_channel_creds,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
+ "google.auth.transport.mtls.default_client_cert_source",
+ return_value=client_cert_source_callback,
+ ):
+ if use_client_cert_env == "false":
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_client_cert_source = None
+ else:
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_client_cert_source = client_cert_source_callback
- # Check the case client_cert_source and ADC client cert are not provided.
- with mock.patch.dict(
- os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
- ):
- with mock.patch.object(transport_class, "__init__") as patched:
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
- ):
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.is_mtls",
- new_callable=mock.PropertyMock,
- ) as is_mtls_mock:
- is_mtls_mock.return_value = False
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=expected_host,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=False,
+ ):
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
@@ -402,7 +407,7 @@ def test_translation_service_client_client_options_scopes(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -432,7 +437,7 @@ def test_translation_service_client_client_options_credentials_file(
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -451,7 +456,7 @@ def test_translation_service_client_client_options_from_dict():
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -469,7 +474,7 @@ def test_translate_text(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.translate_text), "__call__") as call:
+ with mock.patch.object(type(client.transport.translate_text), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.TranslateTextResponse()
@@ -482,6 +487,7 @@ def test_translate_text(
assert args[0] == translation_service.TranslateTextRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, translation_service.TranslateTextResponse)
@@ -489,20 +495,37 @@ def test_translate_text_from_dict():
test_translate_text(request_type=dict)
+def test_translate_text_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = TranslationServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.translate_text), "__call__") as call:
+ client.translate_text()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == translation_service.TranslateTextRequest()
+
+
@pytest.mark.asyncio
-async def test_translate_text_async(transport: str = "grpc_asyncio"):
+async def test_translate_text_async(
+ transport: str = "grpc_asyncio",
+ request_type=translation_service.TranslateTextRequest,
+):
client = TranslationServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = translation_service.TranslateTextRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.translate_text), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.translate_text), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
translation_service.TranslateTextResponse()
@@ -514,12 +537,17 @@ async def test_translate_text_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == translation_service.TranslateTextRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, translation_service.TranslateTextResponse)
+@pytest.mark.asyncio
+async def test_translate_text_async_from_dict():
+ await test_translate_text_async(request_type=dict)
+
+
def test_translate_text_field_headers():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -529,7 +557,7 @@ def test_translate_text_field_headers():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.translate_text), "__call__") as call:
+ with mock.patch.object(type(client.transport.translate_text), "__call__") as call:
call.return_value = translation_service.TranslateTextResponse()
client.translate_text(request)
@@ -556,9 +584,7 @@ async def test_translate_text_field_headers_async():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.translate_text), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.translate_text), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
translation_service.TranslateTextResponse()
)
@@ -587,7 +613,7 @@ def test_detect_language(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.detect_language), "__call__") as call:
+ with mock.patch.object(type(client.transport.detect_language), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.DetectLanguageResponse()
@@ -600,6 +626,7 @@ def test_detect_language(
assert args[0] == translation_service.DetectLanguageRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, translation_service.DetectLanguageResponse)
@@ -607,20 +634,37 @@ def test_detect_language_from_dict():
test_detect_language(request_type=dict)
+def test_detect_language_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = TranslationServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.detect_language), "__call__") as call:
+ client.detect_language()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == translation_service.DetectLanguageRequest()
+
+
@pytest.mark.asyncio
-async def test_detect_language_async(transport: str = "grpc_asyncio"):
+async def test_detect_language_async(
+ transport: str = "grpc_asyncio",
+ request_type=translation_service.DetectLanguageRequest,
+):
client = TranslationServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = translation_service.DetectLanguageRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.detect_language), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.detect_language), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
translation_service.DetectLanguageResponse()
@@ -632,12 +676,17 @@ async def test_detect_language_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == translation_service.DetectLanguageRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, translation_service.DetectLanguageResponse)
+@pytest.mark.asyncio
+async def test_detect_language_async_from_dict():
+ await test_detect_language_async(request_type=dict)
+
+
def test_detect_language_field_headers():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -647,7 +696,7 @@ def test_detect_language_field_headers():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.detect_language), "__call__") as call:
+ with mock.patch.object(type(client.transport.detect_language), "__call__") as call:
call.return_value = translation_service.DetectLanguageResponse()
client.detect_language(request)
@@ -674,9 +723,7 @@ async def test_detect_language_field_headers_async():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.detect_language), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.detect_language), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
translation_service.DetectLanguageResponse()
)
@@ -697,7 +744,7 @@ def test_detect_language_flattened():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.detect_language), "__call__") as call:
+ with mock.patch.object(type(client.transport.detect_language), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.DetectLanguageResponse()
@@ -740,9 +787,7 @@ async def test_detect_language_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.detect_language), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.detect_language), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.DetectLanguageResponse()
@@ -798,7 +843,7 @@ def test_get_supported_languages(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.get_supported_languages), "__call__"
+ type(client.transport.get_supported_languages), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.SupportedLanguages()
@@ -812,6 +857,7 @@ def test_get_supported_languages(
assert args[0] == translation_service.GetSupportedLanguagesRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, translation_service.SupportedLanguages)
@@ -819,19 +865,40 @@ def test_get_supported_languages_from_dict():
test_get_supported_languages(request_type=dict)
+def test_get_supported_languages_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = TranslationServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_supported_languages), "__call__"
+ ) as call:
+ client.get_supported_languages()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == translation_service.GetSupportedLanguagesRequest()
+
+
@pytest.mark.asyncio
-async def test_get_supported_languages_async(transport: str = "grpc_asyncio"):
+async def test_get_supported_languages_async(
+ transport: str = "grpc_asyncio",
+ request_type=translation_service.GetSupportedLanguagesRequest,
+):
client = TranslationServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = translation_service.GetSupportedLanguagesRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.get_supported_languages), "__call__"
+ type(client.transport.get_supported_languages), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -844,12 +911,17 @@ async def test_get_supported_languages_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == translation_service.GetSupportedLanguagesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, translation_service.SupportedLanguages)
+@pytest.mark.asyncio
+async def test_get_supported_languages_async_from_dict():
+ await test_get_supported_languages_async(request_type=dict)
+
+
def test_get_supported_languages_field_headers():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -860,7 +932,7 @@ def test_get_supported_languages_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.get_supported_languages), "__call__"
+ type(client.transport.get_supported_languages), "__call__"
) as call:
call.return_value = translation_service.SupportedLanguages()
@@ -889,7 +961,7 @@ async def test_get_supported_languages_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.get_supported_languages), "__call__"
+ type(client.transport.get_supported_languages), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
translation_service.SupportedLanguages()
@@ -912,7 +984,7 @@ def test_get_supported_languages_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.get_supported_languages), "__call__"
+ type(client.transport.get_supported_languages), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.SupportedLanguages()
@@ -959,7 +1031,7 @@ async def test_get_supported_languages_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.get_supported_languages), "__call__"
+ type(client.transport.get_supported_languages), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.SupportedLanguages()
@@ -982,30 +1054,334 @@ async def test_get_supported_languages_flattened_async():
assert args[0].parent == "parent_value"
- assert args[0].display_language_code == "display_language_code_value"
+ assert args[0].display_language_code == "display_language_code_value"
+
+ assert args[0].model == "model_value"
+
+
+@pytest.mark.asyncio
+async def test_get_supported_languages_flattened_error_async():
+ client = TranslationServiceAsyncClient(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_supported_languages(
+ translation_service.GetSupportedLanguagesRequest(),
+ parent="parent_value",
+ display_language_code="display_language_code_value",
+ model="model_value",
+ )
+
+
+def test_translate_document(
+ transport: str = "grpc", request_type=translation_service.TranslateDocumentRequest
+):
+ client = TranslationServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.translate_document), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = translation_service.TranslateDocumentResponse(
+ model="model_value",
+ )
+
+ response = client.translate_document(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == translation_service.TranslateDocumentRequest()
+
+ # Establish that the response is the type that we expect.
+
+ assert isinstance(response, translation_service.TranslateDocumentResponse)
+
+ assert response.model == "model_value"
+
+
+def test_translate_document_from_dict():
+ test_translate_document(request_type=dict)
+
+
+def test_translate_document_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = TranslationServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.translate_document), "__call__"
+ ) as call:
+ client.translate_document()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == translation_service.TranslateDocumentRequest()
+
+
+@pytest.mark.asyncio
+async def test_translate_document_async(
+ transport: str = "grpc_asyncio",
+ request_type=translation_service.TranslateDocumentRequest,
+):
+ client = TranslationServiceAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.translate_document), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ translation_service.TranslateDocumentResponse(model="model_value",)
+ )
+
+ response = await client.translate_document(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == translation_service.TranslateDocumentRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, translation_service.TranslateDocumentResponse)
+
+ assert response.model == "model_value"
+
+
+@pytest.mark.asyncio
+async def test_translate_document_async_from_dict():
+ await test_translate_document_async(request_type=dict)
+
+
+def test_translate_document_field_headers():
+ client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = translation_service.TranslateDocumentRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.translate_document), "__call__"
+ ) as call:
+ call.return_value = translation_service.TranslateDocumentResponse()
+
+ client.translate_document(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_translate_document_field_headers_async():
+ client = TranslationServiceAsyncClient(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = translation_service.TranslateDocumentRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.translate_document), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ translation_service.TranslateDocumentResponse()
+ )
+
+ await client.translate_document(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_batch_translate_text(
+ transport: str = "grpc", request_type=translation_service.BatchTranslateTextRequest
+):
+ client = TranslationServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.batch_translate_text), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/spam")
+
+ response = client.batch_translate_text(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == translation_service.BatchTranslateTextRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_batch_translate_text_from_dict():
+ test_batch_translate_text(request_type=dict)
+
+
+def test_batch_translate_text_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = TranslationServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.batch_translate_text), "__call__"
+ ) as call:
+ client.batch_translate_text()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == translation_service.BatchTranslateTextRequest()
+
+
+@pytest.mark.asyncio
+async def test_batch_translate_text_async(
+ transport: str = "grpc_asyncio",
+ request_type=translation_service.BatchTranslateTextRequest,
+):
+ client = TranslationServiceAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.batch_translate_text), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+
+ response = await client.batch_translate_text(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == translation_service.BatchTranslateTextRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+@pytest.mark.asyncio
+async def test_batch_translate_text_async_from_dict():
+ await test_batch_translate_text_async(request_type=dict)
+
+
+def test_batch_translate_text_field_headers():
+ client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = translation_service.BatchTranslateTextRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.batch_translate_text), "__call__"
+ ) as call:
+ call.return_value = operations_pb2.Operation(name="operations/op")
+
+ client.batch_translate_text(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
- assert args[0].model == "model_value"
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
-async def test_get_supported_languages_flattened_error_async():
+async def test_batch_translate_text_field_headers_async():
client = TranslationServiceAsyncClient(
credentials=credentials.AnonymousCredentials(),
)
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_supported_languages(
- translation_service.GetSupportedLanguagesRequest(),
- parent="parent_value",
- display_language_code="display_language_code_value",
- model="model_value",
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = translation_service.BatchTranslateTextRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.batch_translate_text), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/op")
)
+ await client.batch_translate_text(request)
-def test_batch_translate_text(
- transport: str = "grpc", request_type=translation_service.BatchTranslateTextRequest
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_batch_translate_document(
+ transport: str = "grpc",
+ request_type=translation_service.BatchTranslateDocumentRequest,
):
client = TranslationServiceClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
@@ -1017,73 +1393,99 @@ def test_batch_translate_text(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.batch_translate_text), "__call__"
+ type(client.transport.batch_translate_document), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
- response = client.batch_translate_text(request)
+ response = client.batch_translate_document(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == translation_service.BatchTranslateTextRequest()
+ assert args[0] == translation_service.BatchTranslateDocumentRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
-def test_batch_translate_text_from_dict():
- test_batch_translate_text(request_type=dict)
+def test_batch_translate_document_from_dict():
+ test_batch_translate_document(request_type=dict)
+
+
+def test_batch_translate_document_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = TranslationServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.batch_translate_document), "__call__"
+ ) as call:
+ client.batch_translate_document()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == translation_service.BatchTranslateDocumentRequest()
@pytest.mark.asyncio
-async def test_batch_translate_text_async(transport: str = "grpc_asyncio"):
+async def test_batch_translate_document_async(
+ transport: str = "grpc_asyncio",
+ request_type=translation_service.BatchTranslateDocumentRequest,
+):
client = TranslationServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = translation_service.BatchTranslateTextRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.batch_translate_text), "__call__"
+ type(client.transport.batch_translate_document), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
- response = await client.batch_translate_text(request)
+ response = await client.batch_translate_document(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == translation_service.BatchTranslateDocumentRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
-def test_batch_translate_text_field_headers():
+@pytest.mark.asyncio
+async def test_batch_translate_document_async_from_dict():
+ await test_batch_translate_document_async(request_type=dict)
+
+
+def test_batch_translate_document_field_headers():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
- request = translation_service.BatchTranslateTextRequest()
+ request = translation_service.BatchTranslateDocumentRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.batch_translate_text), "__call__"
+ type(client.transport.batch_translate_document), "__call__"
) as call:
call.return_value = operations_pb2.Operation(name="operations/op")
- client.batch_translate_text(request)
+ client.batch_translate_document(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
@@ -1096,25 +1498,25 @@ def test_batch_translate_text_field_headers():
@pytest.mark.asyncio
-async def test_batch_translate_text_field_headers_async():
+async def test_batch_translate_document_field_headers_async():
client = TranslationServiceAsyncClient(
credentials=credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
- request = translation_service.BatchTranslateTextRequest()
+ request = translation_service.BatchTranslateDocumentRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.batch_translate_text), "__call__"
+ type(client.transport.batch_translate_document), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
- await client.batch_translate_text(request)
+ await client.batch_translate_document(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
@@ -1138,7 +1540,7 @@ def test_create_glossary(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.create_glossary), "__call__") as call:
+ with mock.patch.object(type(client.transport.create_glossary), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
@@ -1158,20 +1560,37 @@ def test_create_glossary_from_dict():
test_create_glossary(request_type=dict)
+def test_create_glossary_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = TranslationServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.create_glossary), "__call__") as call:
+ client.create_glossary()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == translation_service.CreateGlossaryRequest()
+
+
@pytest.mark.asyncio
-async def test_create_glossary_async(transport: str = "grpc_asyncio"):
+async def test_create_glossary_async(
+ transport: str = "grpc_asyncio",
+ request_type=translation_service.CreateGlossaryRequest,
+):
client = TranslationServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = translation_service.CreateGlossaryRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.create_glossary), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.create_glossary), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
@@ -1183,12 +1602,17 @@ async def test_create_glossary_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == translation_service.CreateGlossaryRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+@pytest.mark.asyncio
+async def test_create_glossary_async_from_dict():
+ await test_create_glossary_async(request_type=dict)
+
+
def test_create_glossary_field_headers():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1198,7 +1622,7 @@ def test_create_glossary_field_headers():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.create_glossary), "__call__") as call:
+ with mock.patch.object(type(client.transport.create_glossary), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.create_glossary(request)
@@ -1225,9 +1649,7 @@ async def test_create_glossary_field_headers_async():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.create_glossary), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.create_glossary), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
@@ -1248,7 +1670,7 @@ def test_create_glossary_flattened():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.create_glossary), "__call__") as call:
+ with mock.patch.object(type(client.transport.create_glossary), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
@@ -1289,9 +1711,7 @@ async def test_create_glossary_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.create_glossary), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.create_glossary), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
@@ -1343,7 +1763,7 @@ def test_list_glossaries(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_glossaries), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.ListGlossariesResponse(
next_page_token="next_page_token_value",
@@ -1358,6 +1778,7 @@ def test_list_glossaries(
assert args[0] == translation_service.ListGlossariesRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, pagers.ListGlossariesPager)
assert response.next_page_token == "next_page_token_value"
@@ -1367,20 +1788,37 @@ def test_list_glossaries_from_dict():
test_list_glossaries(request_type=dict)
+def test_list_glossaries_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = TranslationServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call:
+ client.list_glossaries()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == translation_service.ListGlossariesRequest()
+
+
@pytest.mark.asyncio
-async def test_list_glossaries_async(transport: str = "grpc_asyncio"):
+async def test_list_glossaries_async(
+ transport: str = "grpc_asyncio",
+ request_type=translation_service.ListGlossariesRequest,
+):
client = TranslationServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = translation_service.ListGlossariesRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_glossaries), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
translation_service.ListGlossariesResponse(
@@ -1394,7 +1832,7 @@ async def test_list_glossaries_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == translation_service.ListGlossariesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListGlossariesAsyncPager)
@@ -1402,6 +1840,11 @@ async def test_list_glossaries_async(transport: str = "grpc_asyncio"):
assert response.next_page_token == "next_page_token_value"
+@pytest.mark.asyncio
+async def test_list_glossaries_async_from_dict():
+ await test_list_glossaries_async(request_type=dict)
+
+
def test_list_glossaries_field_headers():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1411,7 +1854,7 @@ def test_list_glossaries_field_headers():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_glossaries), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call:
call.return_value = translation_service.ListGlossariesResponse()
client.list_glossaries(request)
@@ -1438,9 +1881,7 @@ async def test_list_glossaries_field_headers_async():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_glossaries), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
translation_service.ListGlossariesResponse()
)
@@ -1461,7 +1902,7 @@ def test_list_glossaries_flattened():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_glossaries), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.ListGlossariesResponse()
@@ -1501,9 +1942,7 @@ async def test_list_glossaries_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_glossaries), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.ListGlossariesResponse()
@@ -1546,7 +1985,7 @@ def test_list_glossaries_pager():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_glossaries), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
translation_service.ListGlossariesResponse(
@@ -1589,7 +2028,7 @@ def test_list_glossaries_pages():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_glossaries), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
translation_service.ListGlossariesResponse(
@@ -1627,9 +2066,7 @@ async def test_list_glossaries_async_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_glossaries),
- "__call__",
- new_callable=mock.AsyncMock,
+ type(client.transport.list_glossaries), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -1673,9 +2110,7 @@ async def test_list_glossaries_async_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_glossaries),
- "__call__",
- new_callable=mock.AsyncMock,
+ type(client.transport.list_glossaries), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -1720,7 +2155,7 @@ def test_get_glossary(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_glossary), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_glossary), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.Glossary(
name="name_value",
@@ -1739,6 +2174,7 @@ def test_get_glossary(
assert args[0] == translation_service.GetGlossaryRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, translation_service.Glossary)
assert response.name == "name_value"
@@ -1750,20 +2186,36 @@ def test_get_glossary_from_dict():
test_get_glossary(request_type=dict)
+def test_get_glossary_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = TranslationServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_glossary), "__call__") as call:
+ client.get_glossary()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == translation_service.GetGlossaryRequest()
+
+
@pytest.mark.asyncio
-async def test_get_glossary_async(transport: str = "grpc_asyncio"):
+async def test_get_glossary_async(
+ transport: str = "grpc_asyncio", request_type=translation_service.GetGlossaryRequest
+):
client = TranslationServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = translation_service.GetGlossaryRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_glossary), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_glossary), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
translation_service.Glossary(name="name_value", entry_count=1210,)
@@ -1775,7 +2227,7 @@ async def test_get_glossary_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == translation_service.GetGlossaryRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, translation_service.Glossary)
@@ -1785,6 +2237,11 @@ async def test_get_glossary_async(transport: str = "grpc_asyncio"):
assert response.entry_count == 1210
+@pytest.mark.asyncio
+async def test_get_glossary_async_from_dict():
+ await test_get_glossary_async(request_type=dict)
+
+
def test_get_glossary_field_headers():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1794,7 +2251,7 @@ def test_get_glossary_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_glossary), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_glossary), "__call__") as call:
call.return_value = translation_service.Glossary()
client.get_glossary(request)
@@ -1821,9 +2278,7 @@ async def test_get_glossary_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_glossary), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_glossary), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
translation_service.Glossary()
)
@@ -1844,7 +2299,7 @@ def test_get_glossary_flattened():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_glossary), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_glossary), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.Glossary()
@@ -1878,9 +2333,7 @@ async def test_get_glossary_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_glossary), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_glossary), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = translation_service.Glossary()
@@ -1925,7 +2378,7 @@ def test_delete_glossary(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.delete_glossary), "__call__") as call:
+ with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
@@ -1945,20 +2398,37 @@ def test_delete_glossary_from_dict():
test_delete_glossary(request_type=dict)
+def test_delete_glossary_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = TranslationServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call:
+ client.delete_glossary()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == translation_service.DeleteGlossaryRequest()
+
+
@pytest.mark.asyncio
-async def test_delete_glossary_async(transport: str = "grpc_asyncio"):
+async def test_delete_glossary_async(
+ transport: str = "grpc_asyncio",
+ request_type=translation_service.DeleteGlossaryRequest,
+):
client = TranslationServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = translation_service.DeleteGlossaryRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.delete_glossary), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
@@ -1970,12 +2440,17 @@ async def test_delete_glossary_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == translation_service.DeleteGlossaryRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+@pytest.mark.asyncio
+async def test_delete_glossary_async_from_dict():
+ await test_delete_glossary_async(request_type=dict)
+
+
def test_delete_glossary_field_headers():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1985,7 +2460,7 @@ def test_delete_glossary_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.delete_glossary), "__call__") as call:
+ with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.delete_glossary(request)
@@ -2012,9 +2487,7 @@ async def test_delete_glossary_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.delete_glossary), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
@@ -2035,7 +2508,7 @@ def test_delete_glossary_flattened():
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.delete_glossary), "__call__") as call:
+ with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
@@ -2069,9 +2542,7 @@ async def test_delete_glossary_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.delete_glossary), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
@@ -2140,7 +2611,7 @@ def test_transport_instance():
credentials=credentials.AnonymousCredentials(),
)
client = TranslationServiceClient(transport=transport)
- assert client._transport is transport
+ assert client.transport is transport
def test_transport_get_channel():
@@ -2176,7 +2647,7 @@ def test_transport_adc(transport_class):
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
client = TranslationServiceClient(credentials=credentials.AnonymousCredentials(),)
- assert isinstance(client._transport, transports.TranslationServiceGrpcTransport,)
+ assert isinstance(client.transport, transports.TranslationServiceGrpcTransport,)
def test_translation_service_base_transport_error():
@@ -2204,7 +2675,9 @@ def test_translation_service_base_transport():
"translate_text",
"detect_language",
"get_supported_languages",
+ "translate_document",
"batch_translate_text",
+ "batch_translate_document",
"create_glossary",
"list_glossaries",
"get_glossary",
@@ -2284,6 +2757,56 @@ def test_translation_service_transport_auth_adc():
)
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.TranslationServiceGrpcTransport,
+ transports.TranslationServiceGrpcAsyncIOTransport,
+ ],
+)
+def test_translation_service_grpc_transport_client_cert_source_for_mtls(
+ transport_class,
+):
+ cred = credentials.AnonymousCredentials()
+
+ # Check ssl_channel_credentials is used if provided.
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+ mock_ssl_channel_creds = mock.Mock()
+ transport_class(
+ host="squid.clam.whelk",
+ credentials=cred,
+ ssl_channel_credentials=mock_ssl_channel_creds,
+ )
+ mock_create_channel.assert_called_once_with(
+ "squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=(
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/cloud-translation",
+ ),
+ ssl_credentials=mock_ssl_channel_creds,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+    # Check that client_cert_source_for_mtls is used when ssl_channel_credentials
+    # is not provided.
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+ with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+ transport_class(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback,
+ )
+ expected_cert, expected_key = client_cert_source_callback()
+ mock_ssl_cred.assert_called_once_with(
+ certificate_chain=expected_cert, private_key=expected_key
+ )
+
+
def test_translation_service_host_no_port():
client = TranslationServiceClient(
credentials=credentials.AnonymousCredentials(),
@@ -2291,7 +2814,7 @@ def test_translation_service_host_no_port():
api_endpoint="translate.googleapis.com"
),
)
- assert client._transport._host == "translate.googleapis.com:443"
+ assert client.transport._host == "translate.googleapis.com:443"
def test_translation_service_host_with_port():
@@ -2301,11 +2824,11 @@ def test_translation_service_host_with_port():
api_endpoint="translate.googleapis.com:8000"
),
)
- assert client._transport._host == "translate.googleapis.com:8000"
+ assert client.transport._host == "translate.googleapis.com:8000"
def test_translation_service_grpc_transport_channel():
- channel = grpc.insecure_channel("http://localhost/")
+ channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.TranslationServiceGrpcTransport(
@@ -2313,10 +2836,11 @@ def test_translation_service_grpc_transport_channel():
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
def test_translation_service_grpc_asyncio_transport_channel():
- channel = aio.insecure_channel("http://localhost/")
+ channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.TranslationServiceGrpcAsyncIOTransport(
@@ -2324,8 +2848,11 @@ def test_translation_service_grpc_asyncio_transport_channel():
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[
@@ -2340,7 +2867,7 @@ def test_translation_service_transport_channel_mtls_with_client_cert_source(
"grpc.ssl_channel_credentials", autospec=True
) as grpc_ssl_channel_cred:
with mock.patch.object(
- transport_class, "create_channel", autospec=True
+ transport_class, "create_channel"
) as grpc_create_channel:
mock_ssl_cred = mock.Mock()
grpc_ssl_channel_cred.return_value = mock_ssl_cred
@@ -2372,10 +2899,17 @@ def test_translation_service_transport_channel_mtls_with_client_cert_source(
),
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
)
assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == mock_ssl_cred
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[
@@ -2391,7 +2925,7 @@ def test_translation_service_transport_channel_mtls_with_adc(transport_class):
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
with mock.patch.object(
- transport_class, "create_channel", autospec=True
+ transport_class, "create_channel"
) as grpc_create_channel:
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
@@ -2415,6 +2949,10 @@ def test_translation_service_transport_channel_mtls_with_adc(transport_class):
),
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
)
assert transport.grpc_channel == mock_grpc_channel
@@ -2423,7 +2961,7 @@ def test_translation_service_grpc_lro_client():
client = TranslationServiceClient(
credentials=credentials.AnonymousCredentials(), transport="grpc",
)
- transport = client._transport
+ transport = client.transport
    # Ensure that we have an api-core operations client.
assert isinstance(transport.operations_client, operations_v1.OperationsClient,)
@@ -2436,7 +2974,7 @@ def test_translation_service_grpc_lro_async_client():
client = TranslationServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio",
)
- transport = client._client._transport
+ transport = client.transport
    # Ensure that we have an api-core operations client.
assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,)
@@ -2470,6 +3008,107 @@ def test_parse_glossary_path():
assert expected == actual
+def test_common_billing_account_path():
+ billing_account = "cuttlefish"
+
+ expected = "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+ actual = TranslationServiceClient.common_billing_account_path(billing_account)
+ assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+ expected = {
+ "billing_account": "mussel",
+ }
+ path = TranslationServiceClient.common_billing_account_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = TranslationServiceClient.parse_common_billing_account_path(path)
+ assert expected == actual
+
+
+def test_common_folder_path():
+ folder = "winkle"
+
+ expected = "folders/{folder}".format(folder=folder,)
+ actual = TranslationServiceClient.common_folder_path(folder)
+ assert expected == actual
+
+
+def test_parse_common_folder_path():
+ expected = {
+ "folder": "nautilus",
+ }
+ path = TranslationServiceClient.common_folder_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = TranslationServiceClient.parse_common_folder_path(path)
+ assert expected == actual
+
+
+def test_common_organization_path():
+ organization = "scallop"
+
+ expected = "organizations/{organization}".format(organization=organization,)
+ actual = TranslationServiceClient.common_organization_path(organization)
+ assert expected == actual
+
+
+def test_parse_common_organization_path():
+ expected = {
+ "organization": "abalone",
+ }
+ path = TranslationServiceClient.common_organization_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = TranslationServiceClient.parse_common_organization_path(path)
+ assert expected == actual
+
+
+def test_common_project_path():
+ project = "squid"
+
+ expected = "projects/{project}".format(project=project,)
+ actual = TranslationServiceClient.common_project_path(project)
+ assert expected == actual
+
+
+def test_parse_common_project_path():
+ expected = {
+ "project": "clam",
+ }
+ path = TranslationServiceClient.common_project_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = TranslationServiceClient.parse_common_project_path(path)
+ assert expected == actual
+
+
+def test_common_location_path():
+ project = "whelk"
+ location = "octopus"
+
+ expected = "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+ actual = TranslationServiceClient.common_location_path(project, location)
+ assert expected == actual
+
+
+def test_parse_common_location_path():
+ expected = {
+ "project": "oyster",
+ "location": "nudibranch",
+ }
+ path = TranslationServiceClient.common_location_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = TranslationServiceClient.parse_common_location_path(path)
+ assert expected == actual
+
+
def test_client_withDEFAULT_CLIENT_INFO():
client_info = gapic_v1.client_info.ClientInfo()
diff --git a/translation-v3-py.tar.gz b/translation-v3-py.tar.gz
new file mode 100644
index 00000000..e69de29b