diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 0000000..4558c4c
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1,11 @@
+# Code owners file.
+# This file controls who is tagged for review for any given pull request.
+#
+# For syntax help see:
+# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax
+
+# The @googleapis/api-bigquery team is the default owner for changes in this repo
+* @googleapis/api-bigquery
+
+# The python-samples-owners team is the default owner for samples changes
+/samples/ @googleapis/python-samples-owners
diff --git a/.github/snippet-bot.yml b/.github/snippet-bot.yml
new file mode 100644
index 0000000..e69de29
diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg
index 6fd1f17..4365360 100644
--- a/.kokoro/docs/common.cfg
+++ b/.kokoro/docs/common.cfg
@@ -30,7 +30,7 @@ env_vars: {
env_vars: {
key: "V2_STAGING_BUCKET"
- value: "docs-staging-v2-staging"
+ value: "docs-staging-v2"
}
# It will upload the docker image after successful builds.
diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh
new file mode 100755
index 0000000..f525142
--- /dev/null
+++ b/.kokoro/populate-secrets.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+# Copyright 2020 Google LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;}
+function msg { println "$*" >&2 ;}
+function println { printf '%s\n' "$(now) $*" ;}
+
+
+# Populates requested secrets set in SECRET_MANAGER_KEYS from service account:
+# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com
+SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager"
+msg "Creating folder on disk for secrets: ${SECRET_LOCATION}"
+mkdir -p ${SECRET_LOCATION}
+for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g")
+do
+ msg "Retrieving secret ${key}"
+ docker run --entrypoint=gcloud \
+ --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \
+ gcr.io/google.com/cloudsdktool/cloud-sdk \
+ secrets versions access latest \
+ --project cloud-devrel-kokoro-resources \
+ --secret ${key} > \
+ "${SECRET_LOCATION}/${key}"
+ if [[ $? == 0 ]]; then
+ msg "Secret written to ${SECRET_LOCATION}/${key}"
+ else
+ msg "Error retrieving secret ${key}"
+ fi
+done
diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg
index 87b1597..02eb4a6 100644
--- a/.kokoro/release/common.cfg
+++ b/.kokoro/release/common.cfg
@@ -23,42 +23,18 @@ env_vars: {
value: "github/python-bigquery-connection/.kokoro/release.sh"
}
-# Fetch the token needed for reporting release status to GitHub
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "yoshi-automation-github-key"
- }
- }
-}
-
-# Fetch PyPI password
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "google_cloud_pypi_password"
- }
- }
-}
-
-# Fetch magictoken to use with Magic Github Proxy
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "releasetool-magictoken"
- }
- }
+# Fetch PyPI password
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "google_cloud_pypi_password"
+ }
+ }
}
-# Fetch api key to use with Magic Github Proxy
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "magic-github-proxy-api-key"
- }
- }
-}
+# Tokens needed to report release status back to GitHub
+env_vars: {
+ key: "SECRET_MANAGER_KEYS"
+ value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg
index e8ecbbe..8c283c8 100644
--- a/.kokoro/samples/python3.6/common.cfg
+++ b/.kokoro/samples/python3.6/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
value: "py-3.6"
}
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py36"
+}
+
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
value: "github/python-bigquery-connection/.kokoro/test-samples.sh"
diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg
index 91bdbfe..690d232 100644
--- a/.kokoro/samples/python3.7/common.cfg
+++ b/.kokoro/samples/python3.7/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
value: "py-3.7"
}
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py37"
+}
+
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
value: "github/python-bigquery-connection/.kokoro/test-samples.sh"
diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg
index 52bcde6..126f9e4 100644
--- a/.kokoro/samples/python3.8/common.cfg
+++ b/.kokoro/samples/python3.8/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
value: "py-3.8"
}
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py38"
+}
+
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
value: "github/python-bigquery-connection/.kokoro/test-samples.sh"
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index 8ae22c3..2ce7041 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -28,6 +28,12 @@ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
git checkout $LATEST_RELEASE
fi
+# Exit early if samples directory doesn't exist
+if [ ! -d "./samples" ]; then
+ echo "No tests run. './samples' not found"
+ exit 0
+fi
+
# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1
@@ -101,4 +107,4 @@ cd "$ROOT"
# Workaround for Kokoro permissions issue: delete secrets
rm testing/{test-env.sh,client-secrets.json,service-account.json}
-exit "$RTN"
\ No newline at end of file
+exit "$RTN"
diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh
index e8c4251..f39236e 100755
--- a/.kokoro/trampoline.sh
+++ b/.kokoro/trampoline.sh
@@ -15,9 +15,14 @@
set -eo pipefail
-python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" || ret_code=$?
+# Always run the cleanup script, regardless of the success of bouncing into
+# the container.
+function cleanup() {
+ chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+ ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+ echo "cleanup";
+}
+trap cleanup EXIT
-chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
-${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true
-
-exit ${ret_code}
+$(dirname $0)/populate-secrets.sh # Secret Manager secrets.
+python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py"
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index f4b6773..22659fe 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,12 @@
# Changelog
+## [0.3.0](https://www.github.com/googleapis/python-bigquery-connection/compare/v0.2.0...v0.3.0) (2020-10-28)
+
+
+### Features
+
+* add AWS connection type ([#19](https://www.github.com/googleapis/python-bigquery-connection/issues/19)) ([3d1a41a](https://www.github.com/googleapis/python-bigquery-connection/commit/3d1a41ad208274448604a0a17d072f6fcb36535a))
+
## [0.2.0](https://www.github.com/googleapis/python-bigquery-connection/compare/v0.1.0...v0.2.0) (2020-08-10)
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 3ab6e4c..b76c228 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -80,25 +80,6 @@ We use `nox `__ to instrument our tests.
.. nox: https://pypi.org/project/nox/
-Note on Editable Installs / Develop Mode
-========================================
-
-- As mentioned previously, using ``setuptools`` in `develop mode`_
- or a ``pip`` `editable install`_ is not possible with this
- library. This is because this library uses `namespace packages`_.
- For context see `Issue #2316`_ and the relevant `PyPA issue`_.
-
- Since ``editable`` / ``develop`` mode can't be used, packages
- need to be installed directly. Hence your changes to the source
- tree don't get incorporated into the **already installed**
- package.
-
-.. _namespace packages: https://www.python.org/dev/peps/pep-0420/
-.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316
-.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12
-.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode
-.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs
-
*****************************************
I'm getting weird errors... Can you help?
*****************************************
diff --git a/docs/conf.py b/docs/conf.py
index 9bfebf1..2688cc1 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -29,7 +29,7 @@
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "1.6.3"
+needs_sphinx = "1.5.5"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
@@ -39,6 +39,7 @@
"sphinx.ext.autosummary",
"sphinx.ext.intersphinx",
"sphinx.ext.coverage",
+ "sphinx.ext.doctest",
"sphinx.ext.napoleon",
"sphinx.ext.todo",
"sphinx.ext.viewcode",
diff --git a/docs/connection_v1/types.rst b/docs/connection_v1/types.rst
index 3a687b0..85d883a 100644
--- a/docs/connection_v1/types.rst
+++ b/docs/connection_v1/types.rst
@@ -3,3 +3,4 @@ Types for Google Cloud Bigquery Connection v1 API
.. automodule:: google.cloud.bigquery.connection_v1.types
:members:
+ :show-inheritance:
diff --git a/google/cloud/bigquery/connection/__init__.py b/google/cloud/bigquery/connection/__init__.py
index 6743e02..c46bd3f 100644
--- a/google/cloud/bigquery/connection/__init__.py
+++ b/google/cloud/bigquery/connection/__init__.py
@@ -21,6 +21,8 @@
from google.cloud.bigquery.connection_v1.services.connection_service.client import (
ConnectionServiceClient,
)
+from google.cloud.bigquery.connection_v1.types.connection import AwsCrossAccountRole
+from google.cloud.bigquery.connection_v1.types.connection import AwsProperties
from google.cloud.bigquery.connection_v1.types.connection import CloudSqlCredential
from google.cloud.bigquery.connection_v1.types.connection import CloudSqlProperties
from google.cloud.bigquery.connection_v1.types.connection import Connection
@@ -32,6 +34,8 @@
from google.cloud.bigquery.connection_v1.types.connection import UpdateConnectionRequest
__all__ = (
+ "AwsCrossAccountRole",
+ "AwsProperties",
"CloudSqlCredential",
"CloudSqlProperties",
"Connection",
diff --git a/google/cloud/bigquery/connection_v1/__init__.py b/google/cloud/bigquery/connection_v1/__init__.py
index 82e9291..c3ee8e4 100644
--- a/google/cloud/bigquery/connection_v1/__init__.py
+++ b/google/cloud/bigquery/connection_v1/__init__.py
@@ -16,6 +16,8 @@
#
from .services.connection_service import ConnectionServiceClient
+from .types.connection import AwsCrossAccountRole
+from .types.connection import AwsProperties
from .types.connection import CloudSqlCredential
from .types.connection import CloudSqlProperties
from .types.connection import Connection
@@ -28,6 +30,8 @@
__all__ = (
+ "AwsCrossAccountRole",
+ "AwsProperties",
"CloudSqlCredential",
"CloudSqlProperties",
"Connection",
diff --git a/google/cloud/bigquery/connection_v1/services/connection_service/async_client.py b/google/cloud/bigquery/connection_v1/services/connection_service/async_client.py
index bb083c8..5035b81 100644
--- a/google/cloud/bigquery/connection_v1/services/connection_service/async_client.py
+++ b/google/cloud/bigquery/connection_v1/services/connection_service/async_client.py
@@ -35,7 +35,7 @@
from google.iam.v1 import policy_pb2 as policy # type: ignore
from google.protobuf import field_mask_pb2 as field_mask # type: ignore
-from .transports.base import ConnectionServiceTransport
+from .transports.base import ConnectionServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import ConnectionServiceGrpcAsyncIOTransport
from .client import ConnectionServiceClient
@@ -49,10 +49,49 @@ class ConnectionServiceAsyncClient:
DEFAULT_MTLS_ENDPOINT = ConnectionServiceClient.DEFAULT_MTLS_ENDPOINT
connection_path = staticmethod(ConnectionServiceClient.connection_path)
+ parse_connection_path = staticmethod(ConnectionServiceClient.parse_connection_path)
+
+ common_billing_account_path = staticmethod(
+ ConnectionServiceClient.common_billing_account_path
+ )
+ parse_common_billing_account_path = staticmethod(
+ ConnectionServiceClient.parse_common_billing_account_path
+ )
+
+ common_folder_path = staticmethod(ConnectionServiceClient.common_folder_path)
+ parse_common_folder_path = staticmethod(
+ ConnectionServiceClient.parse_common_folder_path
+ )
+
+ common_organization_path = staticmethod(
+ ConnectionServiceClient.common_organization_path
+ )
+ parse_common_organization_path = staticmethod(
+ ConnectionServiceClient.parse_common_organization_path
+ )
+
+ common_project_path = staticmethod(ConnectionServiceClient.common_project_path)
+ parse_common_project_path = staticmethod(
+ ConnectionServiceClient.parse_common_project_path
+ )
+
+ common_location_path = staticmethod(ConnectionServiceClient.common_location_path)
+ parse_common_location_path = staticmethod(
+ ConnectionServiceClient.parse_common_location_path
+ )
from_service_account_file = ConnectionServiceClient.from_service_account_file
from_service_account_json = from_service_account_file
+ @property
+ def transport(self) -> ConnectionServiceTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ ConnectionServiceTransport: The transport used by the client instance.
+ """
+ return self._client.transport
+
get_transport_class = functools.partial(
type(ConnectionServiceClient).get_transport_class, type(ConnectionServiceClient)
)
@@ -63,6 +102,7 @@ def __init__(
credentials: credentials.Credentials = None,
transport: Union[str, ConnectionServiceTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the connection service client.
@@ -78,16 +118,19 @@ def __init__(
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client. GOOGLE_API_USE_MTLS
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint, this is the default value for
- the environment variable) and "auto" (auto switch to the default
- mTLS endpoint if client SSL credentials is present). However,
- the ``api_endpoint`` property takes precedence if provided.
- (2) The ``client_cert_source`` property is used to provide client
- SSL credentials for mutual TLS transport. If not provided, the
- default SSL credentials will be used if present.
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
@@ -95,7 +138,10 @@ def __init__(
"""
self._client = ConnectionServiceClient(
- credentials=credentials, transport=transport, client_options=client_options,
+ credentials=credentials,
+ transport=transport,
+ client_options=client_options,
+ client_info=client_info,
)
async def create_connection(
@@ -149,7 +195,8 @@ async def create_connection(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent, connection, connection_id]):
+ has_flattened_params = any([parent, connection, connection_id])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -172,7 +219,7 @@ async def create_connection(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_connection,
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -225,7 +272,8 @@ async def get_connection(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -248,11 +296,11 @@ async def get_connection(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -307,7 +355,8 @@ async def list_connections(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent]):
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -330,11 +379,11 @@ async def list_connections(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -409,7 +458,8 @@ async def update_connection(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name, connection, update_mask]):
+ has_flattened_params = any([name, connection, update_mask])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -432,7 +482,7 @@ async def update_connection(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_connection,
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -478,7 +528,8 @@ async def delete_connection(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -501,11 +552,11 @@ async def delete_connection(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -623,7 +674,8 @@ async def get_iam_policy(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([resource]):
+ has_flattened_params = any([resource])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -635,20 +687,14 @@ async def get_iam_policy(
request = iam_policy.GetIamPolicyRequest(**request)
elif not request:
- request = iam_policy.GetIamPolicyRequest()
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
-
- if resource is not None:
- request.resource = resource
+ request = iam_policy.GetIamPolicyRequest(resource=resource,)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_iam_policy,
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -769,7 +815,8 @@ async def set_iam_policy(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([resource]):
+ has_flattened_params = any([resource])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -781,20 +828,14 @@ async def set_iam_policy(
request = iam_policy.SetIamPolicyRequest(**request)
elif not request:
- request = iam_policy.SetIamPolicyRequest()
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
-
- if resource is not None:
- request.resource = resource
+ request = iam_policy.SetIamPolicyRequest(resource=resource,)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.set_iam_policy,
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -862,7 +903,8 @@ async def test_iam_permissions(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([resource, permissions]):
+ has_flattened_params = any([resource, permissions])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -874,23 +916,16 @@ async def test_iam_permissions(
request = iam_policy.TestIamPermissionsRequest(**request)
elif not request:
- request = iam_policy.TestIamPermissionsRequest()
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
-
- if resource is not None:
- request.resource = resource
-
- if permissions:
- request.permissions.extend(permissions)
+ request = iam_policy.TestIamPermissionsRequest(
+ resource=resource, permissions=permissions,
+ )
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.test_iam_permissions,
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -907,13 +942,13 @@ async def test_iam_permissions(
try:
- _client_info = gapic_v1.client_info.ClientInfo(
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-bigquery-connection",
).version,
)
except pkg_resources.DistributionNotFound:
- _client_info = gapic_v1.client_info.ClientInfo()
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("ConnectionServiceAsyncClient",)
diff --git a/google/cloud/bigquery/connection_v1/services/connection_service/client.py b/google/cloud/bigquery/connection_v1/services/connection_service/client.py
index 28f6ede..4844ccc 100644
--- a/google/cloud/bigquery/connection_v1/services/connection_service/client.py
+++ b/google/cloud/bigquery/connection_v1/services/connection_service/client.py
@@ -16,17 +16,19 @@
#
from collections import OrderedDict
+from distutils import util
import os
import re
-from typing import Callable, Dict, Sequence, Tuple, Type, Union
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
-import google.api_core.client_options as ClientOptions # type: ignore
+from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport import mtls # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
@@ -37,7 +39,7 @@
from google.iam.v1 import policy_pb2 as policy # type: ignore
from google.protobuf import field_mask_pb2 as field_mask # type: ignore
-from .transports.base import ConnectionServiceTransport
+from .transports.base import ConnectionServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import ConnectionServiceGrpcTransport
from .transports.grpc_asyncio import ConnectionServiceGrpcAsyncIOTransport
@@ -134,6 +136,15 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
from_service_account_json = from_service_account_file
+ @property
+ def transport(self) -> ConnectionServiceTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ ConnectionServiceTransport: The transport used by the client instance.
+ """
+ return self._transport
+
@staticmethod
def connection_path(project: str, location: str, connection: str,) -> str:
"""Return a fully-qualified connection string."""
@@ -150,12 +161,72 @@ def parse_connection_path(path: str) -> Dict[str, str]:
)
return m.groupdict() if m else {}
+ @staticmethod
+ def common_billing_account_path(billing_account: str,) -> str:
+ """Return a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(folder: str,) -> str:
+ """Return a fully-qualified folder string."""
+ return "folders/{folder}".format(folder=folder,)
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(organization: str,) -> str:
+ """Return a fully-qualified organization string."""
+ return "organizations/{organization}".format(organization=organization,)
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse an organization path into its component segments."""
+ m = re.match(r"^organizations/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(project: str,) -> str:
+ """Return a fully-qualified project string."""
+ return "projects/{project}".format(project=project,)
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(project: str, location: str,) -> str:
+ """Return a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
def __init__(
self,
*,
- credentials: credentials.Credentials = None,
- transport: Union[str, ConnectionServiceTransport] = None,
- client_options: ClientOptions = None,
+ credentials: Optional[credentials.Credentials] = None,
+ transport: Union[str, ConnectionServiceTransport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the connection service client.
@@ -168,48 +239,74 @@ def __init__(
transport (Union[str, ~.ConnectionServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
- client_options (ClientOptions): Custom options for the client. It
- won't take effect if a ``transport`` instance is provided.
+ client_options (client_options_lib.ClientOptions): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client. GOOGLE_API_USE_MTLS
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint, this is the default value for
- the environment variable) and "auto" (auto switch to the default
- mTLS endpoint if client SSL credentials is present). However,
- the ``api_endpoint`` property takes precedence if provided.
- (2) The ``client_cert_source`` property is used to provide client
- SSL credentials for mutual TLS transport. If not provided, the
- default SSL credentials will be used if present.
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
- client_options = ClientOptions.from_dict(client_options)
+ client_options = client_options_lib.from_dict(client_options)
if client_options is None:
- client_options = ClientOptions.ClientOptions()
+ client_options = client_options_lib.ClientOptions()
+
+ # Create SSL credentials for mutual TLS if needed.
+ use_client_cert = bool(
+ util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+ )
+
+ ssl_credentials = None
+ is_mtls = False
+ if use_client_cert:
+ if client_options.client_cert_source:
+ import grpc # type: ignore
- if client_options.api_endpoint is None:
- use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never")
+ cert, key = client_options.client_cert_source()
+ ssl_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ is_mtls = True
+ else:
+ creds = SslCredentials()
+ is_mtls = creds.is_mtls
+ ssl_credentials = creds.ssl_credentials if is_mtls else None
+
+ # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ else:
+ use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
- client_options.api_endpoint = self.DEFAULT_ENDPOINT
+ api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
- client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
- has_client_cert_source = (
- client_options.client_cert_source is not None
- or mtls.has_default_client_cert_source()
- )
- client_options.api_endpoint = (
- self.DEFAULT_MTLS_ENDPOINT
- if has_client_cert_source
- else self.DEFAULT_ENDPOINT
+ api_endpoint = (
+ self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT
)
else:
raise MutualTLSChannelError(
- "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always"
+ "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
)
# Save or instantiate the transport.
@@ -233,11 +330,11 @@ def __init__(
self._transport = Transport(
credentials=credentials,
credentials_file=client_options.credentials_file,
- host=client_options.api_endpoint,
+ host=api_endpoint,
scopes=client_options.scopes,
- api_mtls_endpoint=client_options.api_endpoint,
- client_cert_source=client_options.client_cert_source,
+ ssl_channel_credentials=ssl_credentials,
quota_project_id=client_options.quota_project_id,
+ client_info=client_info,
)
def create_connection(
@@ -764,13 +861,7 @@ def get_iam_policy(
request = iam_policy.GetIamPolicyRequest(**request)
elif not request:
- request = iam_policy.GetIamPolicyRequest()
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
-
- if resource is not None:
- request.resource = resource
+ request = iam_policy.GetIamPolicyRequest(resource=resource,)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
@@ -907,13 +998,7 @@ def set_iam_policy(
request = iam_policy.SetIamPolicyRequest(**request)
elif not request:
- request = iam_policy.SetIamPolicyRequest()
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
-
- if resource is not None:
- request.resource = resource
+ request = iam_policy.SetIamPolicyRequest(resource=resource,)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
@@ -997,16 +1082,9 @@ def test_iam_permissions(
request = iam_policy.TestIamPermissionsRequest(**request)
elif not request:
- request = iam_policy.TestIamPermissionsRequest()
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
-
- if resource is not None:
- request.resource = resource
-
- if permissions:
- request.permissions.extend(permissions)
+ request = iam_policy.TestIamPermissionsRequest(
+ resource=resource, permissions=permissions,
+ )
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
@@ -1026,13 +1104,13 @@ def test_iam_permissions(
try:
- _client_info = gapic_v1.client_info.ClientInfo(
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-bigquery-connection",
).version,
)
except pkg_resources.DistributionNotFound:
- _client_info = gapic_v1.client_info.ClientInfo()
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("ConnectionServiceClient",)
diff --git a/google/cloud/bigquery/connection_v1/services/connection_service/transports/base.py b/google/cloud/bigquery/connection_v1/services/connection_service/transports/base.py
index c0e7159..a1cf970 100644
--- a/google/cloud/bigquery/connection_v1/services/connection_service/transports/base.py
+++ b/google/cloud/bigquery/connection_v1/services/connection_service/transports/base.py
@@ -19,7 +19,7 @@
import typing
import pkg_resources
-from google import auth
+from google import auth # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
@@ -33,13 +33,13 @@
try:
- _client_info = gapic_v1.client_info.ClientInfo(
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-bigquery-connection",
).version,
)
except pkg_resources.DistributionNotFound:
- _client_info = gapic_v1.client_info.ClientInfo()
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
class ConnectionServiceTransport(abc.ABC):
@@ -58,6 +58,7 @@ def __init__(
credentials_file: typing.Optional[str] = None,
scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
quota_project_id: typing.Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
**kwargs,
) -> None:
"""Instantiate the transport.
@@ -75,6 +76,11 @@ def __init__(
scope (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
@@ -102,13 +108,13 @@ def __init__(
self._credentials = credentials
# Lifted into its own function so it can be stubbed out during tests.
- self._prep_wrapped_messages()
+ self._prep_wrapped_messages(client_info)
- def _prep_wrapped_messages(self):
+ def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
self.create_connection: gapic_v1.method.wrap_method(
- self.create_connection, default_timeout=60.0, client_info=_client_info,
+ self.create_connection, default_timeout=60.0, client_info=client_info,
),
self.get_connection: gapic_v1.method.wrap_method(
self.get_connection,
@@ -117,11 +123,11 @@ def _prep_wrapped_messages(self):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=client_info,
),
self.list_connections: gapic_v1.method.wrap_method(
self.list_connections,
@@ -130,14 +136,14 @@ def _prep_wrapped_messages(self):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=client_info,
),
self.update_connection: gapic_v1.method.wrap_method(
- self.update_connection, default_timeout=60.0, client_info=_client_info,
+ self.update_connection, default_timeout=60.0, client_info=client_info,
),
self.delete_connection: gapic_v1.method.wrap_method(
self.delete_connection,
@@ -146,22 +152,22 @@ def _prep_wrapped_messages(self):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=client_info,
),
self.get_iam_policy: gapic_v1.method.wrap_method(
- self.get_iam_policy, default_timeout=60.0, client_info=_client_info,
+ self.get_iam_policy, default_timeout=60.0, client_info=client_info,
),
self.set_iam_policy: gapic_v1.method.wrap_method(
- self.set_iam_policy, default_timeout=60.0, client_info=_client_info,
+ self.set_iam_policy, default_timeout=60.0, client_info=client_info,
),
self.test_iam_permissions: gapic_v1.method.wrap_method(
self.test_iam_permissions,
default_timeout=60.0,
- client_info=_client_info,
+ client_info=client_info,
),
}
diff --git a/google/cloud/bigquery/connection_v1/services/connection_service/transports/grpc.py b/google/cloud/bigquery/connection_v1/services/connection_service/transports/grpc.py
index 8c1a3f2..aedcbdb 100644
--- a/google/cloud/bigquery/connection_v1/services/connection_service/transports/grpc.py
+++ b/google/cloud/bigquery/connection_v1/services/connection_service/transports/grpc.py
@@ -15,14 +15,15 @@
# limitations under the License.
#
+import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple
from google.api_core import grpc_helpers # type: ignore
+from google.api_core import gapic_v1 # type: ignore
from google import auth # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
-
import grpc # type: ignore
from google.cloud.bigquery.connection_v1.types import connection
@@ -31,7 +32,7 @@
from google.iam.v1 import policy_pb2 as policy # type: ignore
from google.protobuf import empty_pb2 as empty # type: ignore
-from .base import ConnectionServiceTransport
+from .base import ConnectionServiceTransport, DEFAULT_CLIENT_INFO
class ConnectionServiceGrpcTransport(ConnectionServiceTransport):
@@ -59,7 +60,9 @@ def __init__(
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
- quota_project_id: Optional[str] = None
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
@@ -78,16 +81,23 @@ def __init__(
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
- api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
- provided, it overrides the ``host`` argument and tries to create
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or applicatin default SSL credentials.
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A
- callback to provide client SSL certificate bytes and private key
- bytes, both in PEM format. It is ignored if ``api_mtls_endpoint``
- is None.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
@@ -95,6 +105,8 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._ssl_channel_credentials = ssl_channel_credentials
+
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
@@ -102,7 +114,13 @@ def __init__(
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
+ self._ssl_channel_credentials = None
elif api_mtls_endpoint:
+ warnings.warn(
+ "api_mtls_endpoint and client_cert_source are deprecated",
+ DeprecationWarning,
+ )
+
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
@@ -133,6 +151,24 @@ def __init__(
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
)
+ self._ssl_channel_credentials = ssl_credentials
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
self._stubs = {} # type: Dict[str, Callable]
@@ -143,6 +179,7 @@ def __init__(
credentials_file=credentials_file,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
+ client_info=client_info,
)
@classmethod
@@ -153,7 +190,7 @@ def create_channel(
credentials_file: str = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
- **kwargs
+ **kwargs,
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
@@ -187,24 +224,13 @@ def create_channel(
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
- **kwargs
+ **kwargs,
)
@property
def grpc_channel(self) -> grpc.Channel:
- """Create the channel designed to connect to this service.
-
- This property caches on the instance; repeated calls return
- the same channel.
+ """Return the channel designed to connect to this service.
"""
- # Sanity check: Only create a new channel if we do not already
- # have one.
- if not hasattr(self, "_grpc_channel"):
- self._grpc_channel = self.create_channel(
- self._host, credentials=self._credentials,
- )
-
- # Return the channel from cache.
return self._grpc_channel
@property
diff --git a/google/cloud/bigquery/connection_v1/services/connection_service/transports/grpc_asyncio.py b/google/cloud/bigquery/connection_v1/services/connection_service/transports/grpc_asyncio.py
index 342bdc9..a091147 100644
--- a/google/cloud/bigquery/connection_v1/services/connection_service/transports/grpc_asyncio.py
+++ b/google/cloud/bigquery/connection_v1/services/connection_service/transports/grpc_asyncio.py
@@ -15,9 +15,12 @@
# limitations under the License.
#
+import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
+from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
+from google import auth # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
@@ -30,7 +33,7 @@
from google.iam.v1 import policy_pb2 as policy # type: ignore
from google.protobuf import empty_pb2 as empty # type: ignore
-from .base import ConnectionServiceTransport
+from .base import ConnectionServiceTransport, DEFAULT_CLIENT_INFO
from .grpc import ConnectionServiceGrpcTransport
@@ -101,7 +104,9 @@ def __init__(
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
quota_project_id=None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
@@ -121,16 +126,23 @@ def __init__(
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
- api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
- provided, it overrides the ``host`` argument and tries to create
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or applicatin default SSL credentials.
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A
- callback to provide client SSL certificate bytes and private key
- bytes, both in PEM format. It is ignored if ``api_mtls_endpoint``
- is None.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
@@ -138,6 +150,8 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._ssl_channel_credentials = ssl_channel_credentials
+
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
@@ -145,13 +159,24 @@ def __init__(
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
+ self._ssl_channel_credentials = None
elif api_mtls_endpoint:
+ warnings.warn(
+ "api_mtls_endpoint and client_cert_source are deprecated",
+ DeprecationWarning,
+ )
+
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
else api_mtls_endpoint + ":443"
)
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
@@ -171,6 +196,24 @@ def __init__(
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
)
+ self._ssl_channel_credentials = ssl_credentials
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
# Run the base constructor.
super().__init__(
@@ -179,6 +222,7 @@ def __init__(
credentials_file=credentials_file,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
+ client_info=client_info,
)
self._stubs = {}
@@ -190,13 +234,6 @@ def grpc_channel(self) -> aio.Channel:
This property caches on the instance; repeated calls return
the same channel.
"""
- # Sanity check: Only create a new channel if we do not already
- # have one.
- if not hasattr(self, "_grpc_channel"):
- self._grpc_channel = self.create_channel(
- self._host, credentials=self._credentials,
- )
-
# Return the channel from cache.
return self._grpc_channel
diff --git a/google/cloud/bigquery/connection_v1/types/__init__.py b/google/cloud/bigquery/connection_v1/types/__init__.py
index 66a95cb..56d9c8b 100644
--- a/google/cloud/bigquery/connection_v1/types/__init__.py
+++ b/google/cloud/bigquery/connection_v1/types/__init__.py
@@ -25,6 +25,8 @@
Connection,
CloudSqlProperties,
CloudSqlCredential,
+ AwsProperties,
+ AwsCrossAccountRole,
)
@@ -38,4 +40,6 @@
"Connection",
"CloudSqlProperties",
"CloudSqlCredential",
+ "AwsProperties",
+ "AwsCrossAccountRole",
)
diff --git a/google/cloud/bigquery/connection_v1/types/connection.py b/google/cloud/bigquery/connection_v1/types/connection.py
index 35c8c39..a964123 100644
--- a/google/cloud/bigquery/connection_v1/types/connection.py
+++ b/google/cloud/bigquery/connection_v1/types/connection.py
@@ -33,6 +33,8 @@
"Connection",
"CloudSqlProperties",
"CloudSqlCredential",
+ "AwsProperties",
+ "AwsCrossAccountRole",
},
)
@@ -163,6 +165,8 @@ class Connection(proto.Message):
User provided description.
cloud_sql (~.gcbc_connection.CloudSqlProperties):
Cloud SQL properties.
+ aws (~.gcbc_connection.AwsProperties):
+ Amazon Web Services (AWS) properties.
creation_time (int):
Output only. The creation timestamp of the
connection.
@@ -184,6 +188,10 @@ class Connection(proto.Message):
proto.MESSAGE, number=4, oneof="properties", message="CloudSqlProperties",
)
+ aws = proto.Field(
+ proto.MESSAGE, number=8, oneof="properties", message="AwsProperties",
+ )
+
creation_time = proto.Field(proto.INT64, number=5)
last_modified_time = proto.Field(proto.INT64, number=6)
@@ -200,7 +208,7 @@ class CloudSqlProperties(proto.Message):
``project:location:instance``.
database (str):
Database name.
- type (~.gcbc_connection.CloudSqlProperties.DatabaseType):
+ type_ (~.gcbc_connection.CloudSqlProperties.DatabaseType):
Type of the Cloud SQL database.
credential (~.gcbc_connection.CloudSqlCredential):
Input only. Cloud SQL credential.
@@ -216,7 +224,7 @@ class DatabaseType(proto.Enum):
database = proto.Field(proto.STRING, number=2)
- type = proto.Field(proto.ENUM, number=3, enum=DatabaseType,)
+ type_ = proto.Field(proto.ENUM, number=3, enum=DatabaseType,)
credential = proto.Field(proto.MESSAGE, number=4, message="CloudSqlCredential",)
@@ -236,4 +244,48 @@ class CloudSqlCredential(proto.Message):
password = proto.Field(proto.STRING, number=2)
+class AwsProperties(proto.Message):
+ r"""Connection properties specific to Amazon Web Services (AWS).
+
+ Attributes:
+ cross_account_role (~.gcbc_connection.AwsCrossAccountRole):
+ Authentication using Google owned AWS IAM
+ user's access key to assume into customer's AWS
+ IAM Role.
+ """
+
+ cross_account_role = proto.Field(
+ proto.MESSAGE,
+ number=2,
+ oneof="authentication_method",
+ message="AwsCrossAccountRole",
+ )
+
+
+class AwsCrossAccountRole(proto.Message):
+ r"""Authentication method for Amazon Web Services (AWS) that uses
+ Google owned AWS IAM user's access key to assume into customer's
+ AWS IAM Role.
+
+ Attributes:
+ iam_role_id (str):
+ The user’s AWS IAM Role that trusts the
+ Google-owned AWS IAM user Connection.
+ iam_user_id (str):
+ Output only. Google-owned AWS IAM User for a
+ Connection.
+ external_id (str):
+ Output only. A Google-generated id for representing
+ Connection’s identity in AWS. External Id is also used for
+ preventing the Confused Deputy Problem. See
+ https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-user_externalid.html
+ """
+
+ iam_role_id = proto.Field(proto.STRING, number=1)
+
+ iam_user_id = proto.Field(proto.STRING, number=2)
+
+ external_id = proto.Field(proto.STRING, number=3)
+
+
__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/noxfile.py b/noxfile.py
index 7269a23..0f561b2 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -173,7 +173,9 @@ def docfx(session):
"""Build the docfx yaml files for this library."""
session.install("-e", ".")
- session.install("sphinx", "alabaster", "recommonmark", "sphinx-docfx-yaml")
+ # sphinx-docfx-yaml supports up to sphinx version 1.5.5.
+ # https://github.com/docascode/sphinx-docfx-yaml/issues/97
+ session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml")
shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
session.run(
diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh
index ff599eb..21f6d2a 100755
--- a/scripts/decrypt-secrets.sh
+++ b/scripts/decrypt-secrets.sh
@@ -20,14 +20,27 @@ ROOT=$( dirname "$DIR" )
# Work from the project root.
cd $ROOT
+# Prevent it from overriding files.
+# We recommend that sample authors use their own service account files and cloud project.
+# In that case, they are supposed to prepare these files by themselves.
+if [[ -f "testing/test-env.sh" ]] || \
+ [[ -f "testing/service-account.json" ]] || \
+ [[ -f "testing/client-secrets.json" ]]; then
+ echo "One or more target files exist, aborting."
+ exit 1
+fi
+
# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources.
PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}"
gcloud secrets versions access latest --secret="python-docs-samples-test-env" \
+ --project="${PROJECT_ID}" \
> testing/test-env.sh
gcloud secrets versions access latest \
--secret="python-docs-samples-service-account" \
+ --project="${PROJECT_ID}" \
> testing/service-account.json
gcloud secrets versions access latest \
--secret="python-docs-samples-client-secrets" \
- > testing/client-secrets.json
\ No newline at end of file
+ --project="${PROJECT_ID}" \
+ > testing/client-secrets.json
diff --git a/scripts/fixup_connection_v1_keywords.py b/scripts/fixup_connection_v1_keywords.py
index b945c30..719a3b8 100644
--- a/scripts/fixup_connection_v1_keywords.py
+++ b/scripts/fixup_connection_v1_keywords.py
@@ -1,3 +1,4 @@
+#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
diff --git a/setup.py b/setup.py
index 1323f34..6cee319 100644
--- a/setup.py
+++ b/setup.py
@@ -19,7 +19,7 @@
import os
import setuptools # type: ignore
-version = "0.2.0"
+version = "0.3.0"
package_root = os.path.abspath(os.path.dirname(__file__))
diff --git a/synth.metadata b/synth.metadata
index 4a055ff..8846665 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -4,29 +4,29 @@
"git": {
"name": ".",
"remote": "https://github.com/googleapis/python-bigquery-connection.git",
- "sha": "58eb8615e1858b50a9727db7a56cec3610959d4f"
+ "sha": "1a5e7b868a239a92727b590dcd458f771c34b33a"
}
},
{
"git": {
"name": "googleapis",
"remote": "https://github.com/googleapis/googleapis.git",
- "sha": "fb84629a56703d04f0b5304c4a9ade7313ebd92d",
- "internalRef": "325339219"
+ "sha": "07d41a7e5cade45aba6f0d277c89722b48f2c956",
+ "internalRef": "339292950"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "5f2f711c91199ba2f609d3f06a2fe22aee4e5be3"
+ "sha": "da5c6050d13b4950c82666a81d8acd25157664ae"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "5f2f711c91199ba2f609d3f06a2fe22aee4e5be3"
+ "sha": "da5c6050d13b4950c82666a81d8acd25157664ae"
}
}
],
@@ -40,5 +40,91 @@
"generator": "bazel"
}
}
+ ],
+ "generatedFiles": [
+ ".flake8",
+ ".github/CONTRIBUTING.md",
+ ".github/ISSUE_TEMPLATE/bug_report.md",
+ ".github/ISSUE_TEMPLATE/feature_request.md",
+ ".github/ISSUE_TEMPLATE/support_request.md",
+ ".github/PULL_REQUEST_TEMPLATE.md",
+ ".github/release-please.yml",
+ ".github/snippet-bot.yml",
+ ".gitignore",
+ ".kokoro/build.sh",
+ ".kokoro/continuous/common.cfg",
+ ".kokoro/continuous/continuous.cfg",
+ ".kokoro/docker/docs/Dockerfile",
+ ".kokoro/docker/docs/fetch_gpg_keys.sh",
+ ".kokoro/docs/common.cfg",
+ ".kokoro/docs/docs-presubmit.cfg",
+ ".kokoro/docs/docs.cfg",
+ ".kokoro/populate-secrets.sh",
+ ".kokoro/presubmit/common.cfg",
+ ".kokoro/presubmit/presubmit.cfg",
+ ".kokoro/publish-docs.sh",
+ ".kokoro/release.sh",
+ ".kokoro/release/common.cfg",
+ ".kokoro/release/release.cfg",
+ ".kokoro/samples/lint/common.cfg",
+ ".kokoro/samples/lint/continuous.cfg",
+ ".kokoro/samples/lint/periodic.cfg",
+ ".kokoro/samples/lint/presubmit.cfg",
+ ".kokoro/samples/python3.6/common.cfg",
+ ".kokoro/samples/python3.6/continuous.cfg",
+ ".kokoro/samples/python3.6/periodic.cfg",
+ ".kokoro/samples/python3.6/presubmit.cfg",
+ ".kokoro/samples/python3.7/common.cfg",
+ ".kokoro/samples/python3.7/continuous.cfg",
+ ".kokoro/samples/python3.7/periodic.cfg",
+ ".kokoro/samples/python3.7/presubmit.cfg",
+ ".kokoro/samples/python3.8/common.cfg",
+ ".kokoro/samples/python3.8/continuous.cfg",
+ ".kokoro/samples/python3.8/periodic.cfg",
+ ".kokoro/samples/python3.8/presubmit.cfg",
+ ".kokoro/test-samples.sh",
+ ".kokoro/trampoline.sh",
+ ".kokoro/trampoline_v2.sh",
+ ".trampolinerc",
+ "CODE_OF_CONDUCT.md",
+ "CONTRIBUTING.rst",
+ "LICENSE",
+ "MANIFEST.in",
+ "docs/_static/custom.css",
+ "docs/_templates/layout.html",
+ "docs/conf.py",
+ "docs/connection_v1/services.rst",
+ "docs/connection_v1/types.rst",
+ "docs/multiprocessing.rst",
+ "google/cloud/bigquery/connection/__init__.py",
+ "google/cloud/bigquery/connection/py.typed",
+ "google/cloud/bigquery/connection_v1/__init__.py",
+ "google/cloud/bigquery/connection_v1/py.typed",
+ "google/cloud/bigquery/connection_v1/services/__init__.py",
+ "google/cloud/bigquery/connection_v1/services/connection_service/__init__.py",
+ "google/cloud/bigquery/connection_v1/services/connection_service/async_client.py",
+ "google/cloud/bigquery/connection_v1/services/connection_service/client.py",
+ "google/cloud/bigquery/connection_v1/services/connection_service/pagers.py",
+ "google/cloud/bigquery/connection_v1/services/connection_service/transports/__init__.py",
+ "google/cloud/bigquery/connection_v1/services/connection_service/transports/base.py",
+ "google/cloud/bigquery/connection_v1/services/connection_service/transports/grpc.py",
+ "google/cloud/bigquery/connection_v1/services/connection_service/transports/grpc_asyncio.py",
+ "google/cloud/bigquery/connection_v1/types/__init__.py",
+ "google/cloud/bigquery/connection_v1/types/connection.py",
+ "mypy.ini",
+ "noxfile.py",
+ "renovate.json",
+ "scripts/decrypt-secrets.sh",
+ "scripts/fixup_connection_v1_keywords.py",
+ "scripts/readme-gen/readme_gen.py",
+ "scripts/readme-gen/templates/README.tmpl.rst",
+ "scripts/readme-gen/templates/auth.tmpl.rst",
+ "scripts/readme-gen/templates/auth_api_key.tmpl.rst",
+ "scripts/readme-gen/templates/install_deps.tmpl.rst",
+ "scripts/readme-gen/templates/install_portaudio.tmpl.rst",
+ "setup.cfg",
+ "testing/.gitignore",
+ "tests/unit/gapic/connection_v1/__init__.py",
+ "tests/unit/gapic/connection_v1/test_connection_service.py"
]
}
\ No newline at end of file
diff --git a/tests/unit/gapic/connection_v1/test_connection_service.py b/tests/unit/gapic/connection_v1/test_connection_service.py
index cdedca9..7469f9f 100644
--- a/tests/unit/gapic/connection_v1/test_connection_service.py
+++ b/tests/unit/gapic/connection_v1/test_connection_service.py
@@ -105,12 +105,12 @@ def test_connection_service_client_from_service_account_file(client_class):
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
- assert client._transport._credentials == creds
+ assert client.transport._credentials == creds
client = client_class.from_service_account_json("dummy/file/path.json")
- assert client._transport._credentials == creds
+ assert client.transport._credentials == creds
- assert client._transport._host == "bigqueryconnection.googleapis.com:443"
+ assert client.transport._host == "bigqueryconnection.googleapis.com:443"
def test_connection_service_client_get_transport_class():
@@ -166,14 +166,14 @@ def test_connection_service_client_client_options(
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- api_mtls_endpoint="squid.clam.whelk",
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "never".
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}):
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
@@ -182,14 +182,14 @@ def test_connection_service_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}):
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
@@ -198,90 +198,185 @@ def test_connection_service_client_client_options(
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
- # "auto", and client_cert_source is provided.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+ # unsupported value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError):
+ client = client_class()
+
+ # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError):
+ client = client_class()
+
+ # Check the case quota_project_id is provided
+ options = client_options.ClientOptions(quota_project_id="octopus")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id="octopus",
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name,use_client_cert_env",
+ [
+ (
+ ConnectionServiceClient,
+ transports.ConnectionServiceGrpcTransport,
+ "grpc",
+ "true",
+ ),
+ (
+ ConnectionServiceAsyncClient,
+ transports.ConnectionServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "true",
+ ),
+ (
+ ConnectionServiceClient,
+ transports.ConnectionServiceGrpcTransport,
+ "grpc",
+ "false",
+ ),
+ (
+ ConnectionServiceAsyncClient,
+ transports.ConnectionServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "false",
+ ),
+ ],
+)
+@mock.patch.object(
+ ConnectionServiceClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(ConnectionServiceClient),
+)
+@mock.patch.object(
+ ConnectionServiceAsyncClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(ConnectionServiceAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_connection_service_client_mtls_env_auto(
+ client_class, transport_class, transport_name, use_client_cert_env
+):
+ # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+ # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+ # Check the case client_cert_source is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
options = client_options.ClientOptions(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(client_options=options)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_MTLS_ENDPOINT,
- scopes=None,
- api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
- client_cert_source=client_cert_source_callback,
- quota_project_id=None,
- )
-
- # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
- # "auto", and default_client_cert_source is provided.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
- with mock.patch.object(transport_class, "__init__") as patched:
+ ssl_channel_creds = mock.Mock()
with mock.patch(
- "google.auth.transport.mtls.has_default_client_cert_source",
- return_value=True,
+ "grpc.ssl_channel_credentials", return_value=ssl_channel_creds
):
patched.return_value = None
- client = client_class()
+ client = client_class(client_options=options)
+
+ if use_client_cert_env == "false":
+ expected_ssl_channel_creds = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_ssl_channel_creds = ssl_channel_creds
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_MTLS_ENDPOINT,
+ host=expected_host,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=expected_ssl_channel_creds,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
- # "auto", but client_cert_source and default_client_cert_source are None.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
+ # Check the case ADC client cert is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
- "google.auth.transport.mtls.has_default_client_cert_source",
- return_value=False,
+ "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
):
- patched.return_value = None
- client = client_class()
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
- quota_project_id=None,
- )
-
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has
- # unsupported value.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}):
- with pytest.raises(MutualTLSChannelError):
- client = client_class()
-
- # Check the case quota_project_id is provided
- options = client_options.ClientOptions(quota_project_id="octopus")
- with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(client_options=options)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
- quota_project_id="octopus",
- )
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.is_mtls",
+ new_callable=mock.PropertyMock,
+ ) as is_mtls_mock:
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.ssl_credentials",
+ new_callable=mock.PropertyMock,
+ ) as ssl_credentials_mock:
+ if use_client_cert_env == "false":
+ is_mtls_mock.return_value = False
+ ssl_credentials_mock.return_value = None
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_ssl_channel_creds = None
+ else:
+ is_mtls_mock.return_value = True
+ ssl_credentials_mock.return_value = mock.Mock()
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_ssl_channel_creds = (
+ ssl_credentials_mock.return_value
+ )
+
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ ssl_channel_credentials=expected_ssl_channel_creds,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
+ ):
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.is_mtls",
+ new_callable=mock.PropertyMock,
+ ) as is_mtls_mock:
+ is_mtls_mock.return_value = False
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
@pytest.mark.parametrize(
@@ -308,9 +403,9 @@ def test_connection_service_client_client_options_scopes(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -338,9 +433,9 @@ def test_connection_service_client_client_options_credentials_file(
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -357,9 +452,9 @@ def test_connection_service_client_client_options_from_dict():
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- api_mtls_endpoint="squid.clam.whelk",
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -376,7 +471,7 @@ def test_create_connection(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_connection), "__call__"
+ type(client.transport.create_connection), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = gcbc_connection.Connection(
@@ -400,6 +495,7 @@ def test_create_connection(
assert args[0] == gcbc_connection.CreateConnectionRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, gcbc_connection.Connection)
assert response.name == "name_value"
@@ -420,18 +516,21 @@ def test_create_connection_from_dict():
@pytest.mark.asyncio
-async def test_create_connection_async(transport: str = "grpc_asyncio"):
+async def test_create_connection_async(
+ transport: str = "grpc_asyncio",
+ request_type=gcbc_connection.CreateConnectionRequest,
+):
client = ConnectionServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = gcbc_connection.CreateConnectionRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_connection), "__call__"
+ type(client.transport.create_connection), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -451,7 +550,7 @@ async def test_create_connection_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == gcbc_connection.CreateConnectionRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gcbc_connection.Connection)
@@ -469,6 +568,11 @@ async def test_create_connection_async(transport: str = "grpc_asyncio"):
assert response.has_credential is True
+@pytest.mark.asyncio
+async def test_create_connection_async_from_dict():
+ await test_create_connection_async(request_type=dict)
+
+
def test_create_connection_field_headers():
client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -479,7 +583,7 @@ def test_create_connection_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_connection), "__call__"
+ type(client.transport.create_connection), "__call__"
) as call:
call.return_value = gcbc_connection.Connection()
@@ -508,7 +612,7 @@ async def test_create_connection_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_connection), "__call__"
+ type(client.transport.create_connection), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gcbc_connection.Connection()
@@ -531,7 +635,7 @@ def test_create_connection_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_connection), "__call__"
+ type(client.transport.create_connection), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = gcbc_connection.Connection()
@@ -578,7 +682,7 @@ async def test_create_connection_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_connection), "__call__"
+ type(client.transport.create_connection), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = gcbc_connection.Connection()
@@ -635,7 +739,7 @@ def test_get_connection(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_connection), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_connection), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = connection.Connection(
name="name_value",
@@ -656,6 +760,7 @@ def test_get_connection(
assert args[0] == connection.GetConnectionRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, connection.Connection)
assert response.name == "name_value"
@@ -676,19 +781,19 @@ def test_get_connection_from_dict():
@pytest.mark.asyncio
-async def test_get_connection_async(transport: str = "grpc_asyncio"):
+async def test_get_connection_async(
+ transport: str = "grpc_asyncio", request_type=connection.GetConnectionRequest
+):
client = ConnectionServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = connection.GetConnectionRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_connection), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_connection), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
connection.Connection(
@@ -707,7 +812,7 @@ async def test_get_connection_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == connection.GetConnectionRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, connection.Connection)
@@ -725,6 +830,11 @@ async def test_get_connection_async(transport: str = "grpc_asyncio"):
assert response.has_credential is True
+@pytest.mark.asyncio
+async def test_get_connection_async_from_dict():
+ await test_get_connection_async(request_type=dict)
+
+
def test_get_connection_field_headers():
client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -734,7 +844,7 @@ def test_get_connection_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_connection), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_connection), "__call__") as call:
call.return_value = connection.Connection()
client.get_connection(request)
@@ -761,9 +871,7 @@ async def test_get_connection_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_connection), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_connection), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
connection.Connection()
)
@@ -784,7 +892,7 @@ def test_get_connection_flattened():
client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_connection), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_connection), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = connection.Connection()
@@ -818,9 +926,7 @@ async def test_get_connection_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_connection), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_connection), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = connection.Connection()
@@ -865,9 +971,7 @@ def test_list_connections(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.list_connections), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_connections), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = connection.ListConnectionsResponse(
next_page_token="next_page_token_value",
@@ -882,6 +986,7 @@ def test_list_connections(
assert args[0] == connection.ListConnectionsRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, pagers.ListConnectionsPager)
assert response.next_page_token == "next_page_token_value"
@@ -892,19 +997,19 @@ def test_list_connections_from_dict():
@pytest.mark.asyncio
-async def test_list_connections_async(transport: str = "grpc_asyncio"):
+async def test_list_connections_async(
+ transport: str = "grpc_asyncio", request_type=connection.ListConnectionsRequest
+):
client = ConnectionServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = connection.ListConnectionsRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_connections), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_connections), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
connection.ListConnectionsResponse(next_page_token="next_page_token_value",)
@@ -916,7 +1021,7 @@ async def test_list_connections_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == connection.ListConnectionsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListConnectionsAsyncPager)
@@ -924,6 +1029,11 @@ async def test_list_connections_async(transport: str = "grpc_asyncio"):
assert response.next_page_token == "next_page_token_value"
+@pytest.mark.asyncio
+async def test_list_connections_async_from_dict():
+ await test_list_connections_async(request_type=dict)
+
+
def test_list_connections_field_headers():
client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -933,9 +1043,7 @@ def test_list_connections_field_headers():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.list_connections), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_connections), "__call__") as call:
call.return_value = connection.ListConnectionsResponse()
client.list_connections(request)
@@ -962,9 +1070,7 @@ async def test_list_connections_field_headers_async():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_connections), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_connections), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
connection.ListConnectionsResponse()
)
@@ -985,9 +1091,7 @@ def test_list_connections_flattened():
client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.list_connections), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_connections), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = connection.ListConnectionsResponse()
@@ -1021,9 +1125,7 @@ async def test_list_connections_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_connections), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_connections), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = connection.ListConnectionsResponse()
@@ -1060,9 +1162,7 @@ def test_list_connections_pager():
client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.list_connections), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_connections), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
connection.ListConnectionsResponse(
@@ -1100,9 +1200,7 @@ def test_list_connections_pages():
client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.list_connections), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_connections), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
connection.ListConnectionsResponse(
@@ -1123,8 +1221,8 @@ def test_list_connections_pages():
RuntimeError,
)
pages = list(client.list_connections(request={}).pages)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
@@ -1133,9 +1231,7 @@ async def test_list_connections_async_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_connections),
- "__call__",
- new_callable=mock.AsyncMock,
+ type(client.transport.list_connections), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -1172,9 +1268,7 @@ async def test_list_connections_async_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_connections),
- "__call__",
- new_callable=mock.AsyncMock,
+ type(client.transport.list_connections), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -1196,10 +1290,10 @@ async def test_list_connections_async_pages():
RuntimeError,
)
pages = []
- async for page in (await client.list_connections(request={})).pages:
- pages.append(page)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ async for page_ in (await client.list_connections(request={})).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
def test_update_connection(
@@ -1215,7 +1309,7 @@ def test_update_connection(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_connection), "__call__"
+ type(client.transport.update_connection), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = gcbc_connection.Connection(
@@ -1239,6 +1333,7 @@ def test_update_connection(
assert args[0] == gcbc_connection.UpdateConnectionRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, gcbc_connection.Connection)
assert response.name == "name_value"
@@ -1259,18 +1354,21 @@ def test_update_connection_from_dict():
@pytest.mark.asyncio
-async def test_update_connection_async(transport: str = "grpc_asyncio"):
+async def test_update_connection_async(
+ transport: str = "grpc_asyncio",
+ request_type=gcbc_connection.UpdateConnectionRequest,
+):
client = ConnectionServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = gcbc_connection.UpdateConnectionRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_connection), "__call__"
+ type(client.transport.update_connection), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -1290,7 +1388,7 @@ async def test_update_connection_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == gcbc_connection.UpdateConnectionRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gcbc_connection.Connection)
@@ -1308,6 +1406,11 @@ async def test_update_connection_async(transport: str = "grpc_asyncio"):
assert response.has_credential is True
+@pytest.mark.asyncio
+async def test_update_connection_async_from_dict():
+ await test_update_connection_async(request_type=dict)
+
+
def test_update_connection_field_headers():
client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1318,7 +1421,7 @@ def test_update_connection_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_connection), "__call__"
+ type(client.transport.update_connection), "__call__"
) as call:
call.return_value = gcbc_connection.Connection()
@@ -1347,7 +1450,7 @@ async def test_update_connection_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_connection), "__call__"
+ type(client.transport.update_connection), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gcbc_connection.Connection()
@@ -1370,7 +1473,7 @@ def test_update_connection_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_connection), "__call__"
+ type(client.transport.update_connection), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = gcbc_connection.Connection()
@@ -1417,7 +1520,7 @@ async def test_update_connection_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_connection), "__call__"
+ type(client.transport.update_connection), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = gcbc_connection.Connection()
@@ -1475,7 +1578,7 @@ def test_delete_connection(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_connection), "__call__"
+ type(client.transport.delete_connection), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -1497,18 +1600,20 @@ def test_delete_connection_from_dict():
@pytest.mark.asyncio
-async def test_delete_connection_async(transport: str = "grpc_asyncio"):
+async def test_delete_connection_async(
+ transport: str = "grpc_asyncio", request_type=connection.DeleteConnectionRequest
+):
client = ConnectionServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = connection.DeleteConnectionRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_connection), "__call__"
+ type(client.transport.delete_connection), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -1519,12 +1624,17 @@ async def test_delete_connection_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == connection.DeleteConnectionRequest()
# Establish that the response is the type that we expect.
assert response is None
+@pytest.mark.asyncio
+async def test_delete_connection_async_from_dict():
+ await test_delete_connection_async(request_type=dict)
+
+
def test_delete_connection_field_headers():
client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1535,7 +1645,7 @@ def test_delete_connection_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_connection), "__call__"
+ type(client.transport.delete_connection), "__call__"
) as call:
call.return_value = None
@@ -1564,7 +1674,7 @@ async def test_delete_connection_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_connection), "__call__"
+ type(client.transport.delete_connection), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -1585,7 +1695,7 @@ def test_delete_connection_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_connection), "__call__"
+ type(client.transport.delete_connection), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -1621,7 +1731,7 @@ async def test_delete_connection_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_connection), "__call__"
+ type(client.transport.delete_connection), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -1665,7 +1775,7 @@ def test_get_iam_policy(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy(version=774, etag=b"etag_blob",)
@@ -1678,6 +1788,7 @@ def test_get_iam_policy(
assert args[0] == iam_policy.GetIamPolicyRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, policy.Policy)
assert response.version == 774
@@ -1690,19 +1801,19 @@ def test_get_iam_policy_from_dict():
@pytest.mark.asyncio
-async def test_get_iam_policy_async(transport: str = "grpc_asyncio"):
+async def test_get_iam_policy_async(
+ transport: str = "grpc_asyncio", request_type=iam_policy.GetIamPolicyRequest
+):
client = ConnectionServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = iam_policy.GetIamPolicyRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policy.Policy(version=774, etag=b"etag_blob",)
@@ -1714,7 +1825,7 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == iam_policy.GetIamPolicyRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, policy.Policy)
@@ -1724,6 +1835,11 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"):
assert response.etag == b"etag_blob"
+@pytest.mark.asyncio
+async def test_get_iam_policy_async_from_dict():
+ await test_get_iam_policy_async(request_type=dict)
+
+
def test_get_iam_policy_field_headers():
client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1733,7 +1849,7 @@ def test_get_iam_policy_field_headers():
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
call.return_value = policy.Policy()
client.get_iam_policy(request)
@@ -1760,9 +1876,7 @@ async def test_get_iam_policy_field_headers_async():
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy())
await client.get_iam_policy(request)
@@ -1777,10 +1891,10 @@ async def test_get_iam_policy_field_headers_async():
assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
-def test_get_iam_policy_from_dict():
+def test_get_iam_policy_from_dict_foreign():
client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
@@ -1797,7 +1911,7 @@ def test_get_iam_policy_flattened():
client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
@@ -1831,9 +1945,7 @@ async def test_get_iam_policy_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
@@ -1876,7 +1988,7 @@ def test_set_iam_policy(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy(version=774, etag=b"etag_blob",)
@@ -1889,6 +2001,7 @@ def test_set_iam_policy(
assert args[0] == iam_policy.SetIamPolicyRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, policy.Policy)
assert response.version == 774
@@ -1901,19 +2014,19 @@ def test_set_iam_policy_from_dict():
@pytest.mark.asyncio
-async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
+async def test_set_iam_policy_async(
+ transport: str = "grpc_asyncio", request_type=iam_policy.SetIamPolicyRequest
+):
client = ConnectionServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = iam_policy.SetIamPolicyRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.set_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policy.Policy(version=774, etag=b"etag_blob",)
@@ -1925,7 +2038,7 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == iam_policy.SetIamPolicyRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, policy.Policy)
@@ -1935,6 +2048,11 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
assert response.etag == b"etag_blob"
+@pytest.mark.asyncio
+async def test_set_iam_policy_async_from_dict():
+ await test_set_iam_policy_async(request_type=dict)
+
+
def test_set_iam_policy_field_headers():
client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1944,7 +2062,7 @@ def test_set_iam_policy_field_headers():
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
call.return_value = policy.Policy()
client.set_iam_policy(request)
@@ -1971,9 +2089,7 @@ async def test_set_iam_policy_field_headers_async():
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.set_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy())
await client.set_iam_policy(request)
@@ -1988,10 +2104,10 @@ async def test_set_iam_policy_field_headers_async():
assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
-def test_set_iam_policy_from_dict():
+def test_set_iam_policy_from_dict_foreign():
client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
@@ -2008,7 +2124,7 @@ def test_set_iam_policy_flattened():
client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
@@ -2042,9 +2158,7 @@ async def test_set_iam_policy_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.set_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
@@ -2088,7 +2202,7 @@ def test_test_iam_permissions(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = iam_policy.TestIamPermissionsResponse(
@@ -2104,6 +2218,7 @@ def test_test_iam_permissions(
assert args[0] == iam_policy.TestIamPermissionsRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, iam_policy.TestIamPermissionsResponse)
assert response.permissions == ["permissions_value"]
@@ -2114,18 +2229,20 @@ def test_test_iam_permissions_from_dict():
@pytest.mark.asyncio
-async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"):
+async def test_test_iam_permissions_async(
+ transport: str = "grpc_asyncio", request_type=iam_policy.TestIamPermissionsRequest
+):
client = ConnectionServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = iam_policy.TestIamPermissionsRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -2138,7 +2255,7 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == iam_policy.TestIamPermissionsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, iam_policy.TestIamPermissionsResponse)
@@ -2146,6 +2263,11 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"):
assert response.permissions == ["permissions_value"]
+@pytest.mark.asyncio
+async def test_test_iam_permissions_async_from_dict():
+ await test_test_iam_permissions_async(request_type=dict)
+
+
def test_test_iam_permissions_field_headers():
client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -2156,7 +2278,7 @@ def test_test_iam_permissions_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
call.return_value = iam_policy.TestIamPermissionsResponse()
@@ -2185,7 +2307,7 @@ async def test_test_iam_permissions_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
iam_policy.TestIamPermissionsResponse()
@@ -2203,11 +2325,11 @@ async def test_test_iam_permissions_field_headers_async():
assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
-def test_test_iam_permissions_from_dict():
+def test_test_iam_permissions_from_dict_foreign():
client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = iam_policy.TestIamPermissionsResponse()
@@ -2226,7 +2348,7 @@ def test_test_iam_permissions_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = iam_policy.TestIamPermissionsResponse()
@@ -2268,7 +2390,7 @@ async def test_test_iam_permissions_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = iam_policy.TestIamPermissionsResponse()
@@ -2344,7 +2466,7 @@ def test_transport_instance():
credentials=credentials.AnonymousCredentials(),
)
client = ConnectionServiceClient(transport=transport)
- assert client._transport is transport
+ assert client.transport is transport
def test_transport_get_channel():
@@ -2362,10 +2484,25 @@ def test_transport_get_channel():
assert channel
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.ConnectionServiceGrpcTransport,
+ transports.ConnectionServiceGrpcAsyncIOTransport,
+ ],
+)
+def test_transport_adc(transport_class):
+ # Test default credentials are used if not provided.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport_class()
+ adc.assert_called_once()
+
+
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
- assert isinstance(client._transport, transports.ConnectionServiceGrpcTransport,)
+ assert isinstance(client.transport, transports.ConnectionServiceGrpcTransport,)
def test_connection_service_base_transport_error():
@@ -2426,6 +2563,17 @@ def test_connection_service_base_transport_with_credentials_file():
)
+def test_connection_service_base_transport_with_adc():
+ # Test the default credentials are used if credentials and credentials_file are None.
+ with mock.patch.object(auth, "default") as adc, mock.patch(
+ "google.cloud.bigquery.connection_v1.services.connection_service.transports.ConnectionServiceTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport = transports.ConnectionServiceTransport()
+ adc.assert_called_once()
+
+
def test_connection_service_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(auth, "default") as adc:
@@ -2464,7 +2612,7 @@ def test_connection_service_host_no_port():
api_endpoint="bigqueryconnection.googleapis.com"
),
)
- assert client._transport._host == "bigqueryconnection.googleapis.com:443"
+ assert client.transport._host == "bigqueryconnection.googleapis.com:443"
def test_connection_service_host_with_port():
@@ -2474,197 +2622,125 @@ def test_connection_service_host_with_port():
api_endpoint="bigqueryconnection.googleapis.com:8000"
),
)
- assert client._transport._host == "bigqueryconnection.googleapis.com:8000"
+ assert client.transport._host == "bigqueryconnection.googleapis.com:8000"
def test_connection_service_grpc_transport_channel():
channel = grpc.insecure_channel("http://localhost/")
- # Check that if channel is provided, mtls endpoint and client_cert_source
- # won't be used.
- callback = mock.MagicMock()
+ # Check that channel is used if provided.
transport = transports.ConnectionServiceGrpcTransport(
- host="squid.clam.whelk",
- channel=channel,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=callback,
+ host="squid.clam.whelk", channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
- assert not callback.called
+ assert transport._ssl_channel_credentials == None
def test_connection_service_grpc_asyncio_transport_channel():
channel = aio.insecure_channel("http://localhost/")
- # Check that if channel is provided, mtls endpoint and client_cert_source
- # won't be used.
- callback = mock.MagicMock()
+ # Check that channel is used if provided.
transport = transports.ConnectionServiceGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- channel=channel,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=callback,
+ host="squid.clam.whelk", channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
- assert not callback.called
-
-
-@mock.patch("grpc.ssl_channel_credentials", autospec=True)
-@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
-def test_connection_service_grpc_transport_channel_mtls_with_client_cert_source(
- grpc_create_channel, grpc_ssl_channel_cred
-):
- # Check that if channel is None, but api_mtls_endpoint and client_cert_source
- # are provided, then a mTLS channel will be created.
- mock_cred = mock.Mock()
-
- mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- transport = transports.ConnectionServiceGrpcTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=(
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/cloud-platform",
- ),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- )
- assert transport.grpc_channel == mock_grpc_channel
-
-
-@mock.patch("grpc.ssl_channel_credentials", autospec=True)
-@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True)
-def test_connection_service_grpc_asyncio_transport_channel_mtls_with_client_cert_source(
- grpc_create_channel, grpc_ssl_channel_cred
-):
- # Check that if channel is None, but api_mtls_endpoint and client_cert_source
- # are provided, then a mTLS channel will be created.
- mock_cred = mock.Mock()
-
- mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- transport = transports.ConnectionServiceGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=(
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/cloud-platform",
- ),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- )
- assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == None
@pytest.mark.parametrize(
- "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
+ "transport_class",
+ [
+ transports.ConnectionServiceGrpcTransport,
+ transports.ConnectionServiceGrpcAsyncIOTransport,
+ ],
)
-@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
-def test_connection_service_grpc_transport_channel_mtls_with_adc(
- grpc_create_channel, api_mtls_endpoint
+def test_connection_service_transport_channel_mtls_with_client_cert_source(
+ transport_class,
):
- # Check that if channel and client_cert_source are None, but api_mtls_endpoint
- # is provided, then a mTLS channel will be created with SSL ADC.
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- # Mock google.auth.transport.grpc.SslCredentials class.
- mock_ssl_cred = mock.Mock()
- with mock.patch.multiple(
- "google.auth.transport.grpc.SslCredentials",
- __init__=mock.Mock(return_value=None),
- ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
- ):
- mock_cred = mock.Mock()
- transport = transports.ConnectionServiceGrpcTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint=api_mtls_endpoint,
- client_cert_source=None,
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=(
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/cloud-platform",
- ),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- )
- assert transport.grpc_channel == mock_grpc_channel
+ with mock.patch(
+ "grpc.ssl_channel_credentials", autospec=True
+ ) as grpc_ssl_channel_cred:
+ with mock.patch.object(
+ transport_class, "create_channel", autospec=True
+ ) as grpc_create_channel:
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ cred = credentials.AnonymousCredentials()
+ with pytest.warns(DeprecationWarning):
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (cred, None)
+ transport = transport_class(
+ host="squid.clam.whelk",
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ adc.assert_called_once()
+
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=(
+ "https://www.googleapis.com/auth/bigquery",
+ "https://www.googleapis.com/auth/cloud-platform",
+ ),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == mock_ssl_cred
@pytest.mark.parametrize(
- "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
+ "transport_class",
+ [
+ transports.ConnectionServiceGrpcTransport,
+ transports.ConnectionServiceGrpcAsyncIOTransport,
+ ],
)
-@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True)
-def test_connection_service_grpc_asyncio_transport_channel_mtls_with_adc(
- grpc_create_channel, api_mtls_endpoint
-):
- # Check that if channel and client_cert_source are None, but api_mtls_endpoint
- # is provided, then a mTLS channel will be created with SSL ADC.
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- # Mock google.auth.transport.grpc.SslCredentials class.
+def test_connection_service_transport_channel_mtls_with_adc(transport_class):
mock_ssl_cred = mock.Mock()
with mock.patch.multiple(
"google.auth.transport.grpc.SslCredentials",
__init__=mock.Mock(return_value=None),
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
- mock_cred = mock.Mock()
- transport = transports.ConnectionServiceGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint=api_mtls_endpoint,
- client_cert_source=None,
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=(
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/cloud-platform",
- ),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- )
- assert transport.grpc_channel == mock_grpc_channel
+ with mock.patch.object(
+ transport_class, "create_channel", autospec=True
+ ) as grpc_create_channel:
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+ mock_cred = mock.Mock()
+
+ with pytest.warns(DeprecationWarning):
+ transport = transport_class(
+ host="squid.clam.whelk",
+ credentials=mock_cred,
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=None,
+ )
+
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=mock_cred,
+ credentials_file=None,
+ scopes=(
+ "https://www.googleapis.com/auth/bigquery",
+ "https://www.googleapis.com/auth/cloud-platform",
+ ),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
def test_connection_path():
@@ -2690,3 +2766,125 @@ def test_parse_connection_path():
# Check that the path construction is reversible.
actual = ConnectionServiceClient.parse_connection_path(path)
assert expected == actual
+
+
+def test_common_billing_account_path():
+ billing_account = "cuttlefish"
+
+ expected = "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+ actual = ConnectionServiceClient.common_billing_account_path(billing_account)
+ assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+ expected = {
+ "billing_account": "mussel",
+ }
+ path = ConnectionServiceClient.common_billing_account_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = ConnectionServiceClient.parse_common_billing_account_path(path)
+ assert expected == actual
+
+
+def test_common_folder_path():
+ folder = "winkle"
+
+ expected = "folders/{folder}".format(folder=folder,)
+ actual = ConnectionServiceClient.common_folder_path(folder)
+ assert expected == actual
+
+
+def test_parse_common_folder_path():
+ expected = {
+ "folder": "nautilus",
+ }
+ path = ConnectionServiceClient.common_folder_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = ConnectionServiceClient.parse_common_folder_path(path)
+ assert expected == actual
+
+
+def test_common_organization_path():
+ organization = "scallop"
+
+ expected = "organizations/{organization}".format(organization=organization,)
+ actual = ConnectionServiceClient.common_organization_path(organization)
+ assert expected == actual
+
+
+def test_parse_common_organization_path():
+ expected = {
+ "organization": "abalone",
+ }
+ path = ConnectionServiceClient.common_organization_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = ConnectionServiceClient.parse_common_organization_path(path)
+ assert expected == actual
+
+
+def test_common_project_path():
+ project = "squid"
+
+ expected = "projects/{project}".format(project=project,)
+ actual = ConnectionServiceClient.common_project_path(project)
+ assert expected == actual
+
+
+def test_parse_common_project_path():
+ expected = {
+ "project": "clam",
+ }
+ path = ConnectionServiceClient.common_project_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = ConnectionServiceClient.parse_common_project_path(path)
+ assert expected == actual
+
+
+def test_common_location_path():
+ project = "whelk"
+ location = "octopus"
+
+ expected = "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+ actual = ConnectionServiceClient.common_location_path(project, location)
+ assert expected == actual
+
+
+def test_parse_common_location_path():
+ expected = {
+ "project": "oyster",
+ "location": "nudibranch",
+ }
+ path = ConnectionServiceClient.common_location_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = ConnectionServiceClient.parse_common_location_path(path)
+ assert expected == actual
+
+
+def test_client_withDEFAULT_CLIENT_INFO():
+ client_info = gapic_v1.client_info.ClientInfo()
+
+ with mock.patch.object(
+ transports.ConnectionServiceTransport, "_prep_wrapped_messages"
+ ) as prep:
+ client = ConnectionServiceClient(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
+
+ with mock.patch.object(
+ transports.ConnectionServiceTransport, "_prep_wrapped_messages"
+ ) as prep:
+ transport_class = ConnectionServiceClient.get_transport_class()
+ transport = transport_class(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)