From 59bf5f91110e37327e783ea425f17db7dafeb260 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 3 Jan 2025 14:32:48 -0500 Subject: [PATCH 01/10] chore(python): Update the python version in docs presubmit to use 3.10 (#584) Source-Link: https://github.com/googleapis/synthtool/commit/de3def663b75d8b9ae1e5d548364c960ff13af8f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:a1c5112b81d645f5bbc4d4bbc99d7dcb5089a52216c0e3fb1203a0eeabadd7d5 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 6 ++-- .github/workflows/docs.yml | 2 +- .github/workflows/unittest.yml | 5 ++- .kokoro/docker/docs/requirements.txt | 52 ++++++++++++++++++++++------ 4 files changed, 49 insertions(+), 16 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 6301519a..1d0fd7e7 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2ed982f884312e4883e01b5ab8af8b6935f0216a5a2d82928d273081fc3be562 -# created: 2024-11-12T12:09:45.821174897Z + digest: sha256:a1c5112b81d645f5bbc4d4bbc99d7dcb5089a52216c0e3fb1203a0eeabadd7d5 +# created: 2025-01-02T23:09:36.975468657Z diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 698fbc5c..2833fe98 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v5 with: - python-version: "3.9" + python-version: "3.10" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index 6a0bc074..c66b757c 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -5,7 +5,10 @@ on: name: unittest jobs: unit: - runs-on: ubuntu-latest + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed. 
+ # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix + # https://docs.github.com/en/actions/using-github-hosted-runners/using-github-hosted-runners/about-github-hosted-runners#standard-github-hosted-runners-for-public-repositories + runs-on: ubuntu-22.04 strategy: matrix: python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', '3.13'] diff --git a/.kokoro/docker/docs/requirements.txt b/.kokoro/docker/docs/requirements.txt index 8bb07645..f99a5c4a 100644 --- a/.kokoro/docker/docs/requirements.txt +++ b/.kokoro/docker/docs/requirements.txt @@ -2,11 +2,11 @@ # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --allow-unsafe --generate-hashes requirements.in +# pip-compile --allow-unsafe --generate-hashes synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in # -argcomplete==3.5.1 \ - --hash=sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363 \ - --hash=sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4 +argcomplete==3.5.2 \ + --hash=sha256:036d020d79048a5d525bc63880d7a4b8d1668566b8a76daf1144c0bbe0f63472 \ + --hash=sha256:23146ed7ac4403b70bd6026402468942ceba34a6732255b9edf5b7354f68a6bb # via nox colorlog==6.9.0 \ --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ @@ -23,7 +23,7 @@ filelock==3.16.1 \ nox==2024.10.9 \ --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 - # via -r requirements.in + # via -r synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in packaging==24.2 \ --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f @@ -32,11 +32,41 @@ platformdirs==4.3.6 \ --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb # via virtualenv -tomli==2.0.2 \ - --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \ - --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed +tomli==2.2.1 \ + --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ + --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ + --hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \ + --hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \ + --hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \ + --hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \ + --hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \ + --hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \ + --hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \ + --hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \ + --hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \ + --hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \ + --hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \ + --hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \ + --hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \ + 
--hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \ + --hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \ + --hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \ + --hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \ + --hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \ + --hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \ + --hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \ + --hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \ + --hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \ + --hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \ + --hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \ + --hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \ + --hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \ + --hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \ + --hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \ + --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ + --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 # via nox -virtualenv==20.27.1 \ - --hash=sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba \ - --hash=sha256:f11f1b8a29525562925f745563bfd48b189450f61fb34c4f9cc79dd5aa32a1f4 +virtualenv==20.28.0 \ + --hash=sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0 \ + --hash=sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa # via nox From 14a0f96c8f7c558a38fec4ef92c0810ea6096646 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 16 Jan 2025 12:07:03 -0800 Subject: [PATCH 02/10] chore: update protoplus for python 3.13 (#579) Fixes https://github.com/googleapis/python-datastore/issues/571 --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index eb3c2660..0534aa7c 100644 --- a/setup.py +++ b/setup.py @@ -36,6 +36,7 @@ "google-cloud-core >= 1.4.0, <3.0.0dev", "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "proto-plus >= 1.25.0, <2.0.0dev; python_version>='3.13'", "protobuf>=3.20.2,<6.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] extras = {"libcst": "libcst >= 0.2.5"} From 9f83d17c132168118a2e169c975662d76540d6ff Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 3 Mar 2025 11:45:08 -0500 Subject: [PATCH 03/10] chore(python): conditionally load credentials in .kokoro/build.sh (#594) Source-Link: https://github.com/googleapis/synthtool/commit/aa69fb74717c8f4c58c60f8cc101d3f4b2c07b09 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f016446d6e520e5fb552c45b110cba3f217bffdd3d06bdddd076e9e6d13266cf Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 +- .kokoro/build.sh | 20 ++- .kokoro/docker/docs/requirements.in | 1 + .kokoro/docker/docs/requirements.txt | 243 ++++++++++++++++++++++++++- .kokoro/publish-docs.sh | 4 - README.rst | 93 +++++++++- renovate.json | 2 +- 7 files changed, 343 insertions(+), 24 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 1d0fd7e7..3f7634f2 100644 --- 
a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:a1c5112b81d645f5bbc4d4bbc99d7dcb5089a52216c0e3fb1203a0eeabadd7d5 -# created: 2025-01-02T23:09:36.975468657Z + digest: sha256:f016446d6e520e5fb552c45b110cba3f217bffdd3d06bdddd076e9e6d13266cf +# created: 2025-02-21T19:32:52.01306189Z diff --git a/.kokoro/build.sh b/.kokoro/build.sh index e0cd71b0..d41b45aa 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -15,11 +15,13 @@ set -eo pipefail +CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}") + if [[ -z "${PROJECT_ROOT:-}" ]]; then - PROJECT_ROOT="github/python-datastore" + PROJECT_ROOT=$(realpath "${CURRENT_DIR}/..") fi -cd "${PROJECT_ROOT}" +pushd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 @@ -28,10 +30,16 @@ export PYTHONUNBUFFERED=1 env | grep KOKORO # Setup service account credentials. -export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +if [[ -f "${KOKORO_GFILE_DIR}/service-account.json" ]] +then + export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +fi # Setup project id. -export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") +if [[ -f "${KOKORO_GFILE_DIR}/project-id.json" ]] +then + export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") +fi # If this is a continuous build, send the test log to the FlakyBot. # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. @@ -46,7 +54,7 @@ fi # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. if [[ -n "${NOX_SESSION:-}" ]]; then - python3 -m nox -s ${NOX_SESSION:-} + python3 -m nox -s ${NOX_SESSION:-} else - python3 -m nox + python3 -m nox fi diff --git a/.kokoro/docker/docs/requirements.in b/.kokoro/docker/docs/requirements.in index 816817c6..586bd070 100644 --- a/.kokoro/docker/docs/requirements.in +++ b/.kokoro/docker/docs/requirements.in @@ -1 +1,2 @@ nox +gcp-docuploader diff --git a/.kokoro/docker/docs/requirements.txt b/.kokoro/docker/docs/requirements.txt index f99a5c4a..a9360a25 100644 --- a/.kokoro/docker/docs/requirements.txt +++ b/.kokoro/docker/docs/requirements.txt @@ -2,16 +2,124 @@ # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --allow-unsafe --generate-hashes synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in +# pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.5.2 \ - --hash=sha256:036d020d79048a5d525bc63880d7a4b8d1668566b8a76daf1144c0bbe0f63472 \ - --hash=sha256:23146ed7ac4403b70bd6026402468942ceba34a6732255b9edf5b7354f68a6bb +argcomplete==3.5.3 \ + --hash=sha256:2ab2c4a215c59fd6caaff41a869480a23e8f6a5f910b266c1808037f4e375b61 \ + --hash=sha256:c12bf50eded8aebb298c7b7da7a5ff3ee24dffd9f5281867dfe1424b58c55392 # via nox +cachetools==5.5.0 \ + --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ + --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a + # via google-auth +certifi==2024.12.14 \ + --hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \ + --hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db + # via requests +charset-normalizer==3.4.1 \ + --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \ + 
--hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \ + --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \ + --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \ + --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \ + --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \ + --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \ + --hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \ + --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \ + --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \ + --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \ + --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \ + --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \ + --hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \ + --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \ + --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \ + --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \ + --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \ + --hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \ + --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \ + --hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \ + --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \ + --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \ + --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \ + --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \ + --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \ + --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \ + --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \ + --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \ + --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \ + --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \ + --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \ + --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \ + --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \ + --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \ + --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \ + --hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \ + --hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \ + --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \ + --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \ + --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \ + --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \ + --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \ + 
--hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \ + --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \ + --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \ + --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \ + --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \ + --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \ + --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \ + --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \ + --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \ + --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \ + --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \ + --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \ + --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \ + --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \ + --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \ + --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \ + --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \ + --hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \ + --hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \ + --hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \ + --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \ + --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \ + --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \ + --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \ + --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \ + --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \ + --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \ + --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \ + --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \ + --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \ + --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \ + --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \ + --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \ + --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \ + --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \ + --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \ + --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \ + --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \ + --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \ + --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \ + --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \ + --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \ + 
--hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \ + --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \ + --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \ + --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \ + --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \ + --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \ + --hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616 + # via requests +click==8.1.8 \ + --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \ + --hash=sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a + # via gcp-docuploader colorlog==6.9.0 \ --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ --hash=sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2 - # via nox + # via + # gcp-docuploader + # nox distlib==0.3.9 \ --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 @@ -20,10 +128,78 @@ filelock==3.16.1 \ --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 # via virtualenv +gcp-docuploader==0.6.5 \ + --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ + --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea + # via -r requirements.in +google-api-core==2.24.0 \ + --hash=sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9 \ + --hash=sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf + # via + # google-cloud-core + # google-cloud-storage +google-auth==2.37.0 \ + --hash=sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00 \ + --hash=sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0 + # via + # google-api-core + # google-cloud-core + # google-cloud-storage +google-cloud-core==2.4.1 \ + --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ + --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 + # via google-cloud-storage +google-cloud-storage==2.19.0 \ + --hash=sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba \ + --hash=sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2 + # via gcp-docuploader +google-crc32c==1.6.0 \ + --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \ + --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \ + --hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \ + --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \ + --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \ + --hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \ + --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \ + --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \ + --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \ + --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \ + --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \ + 
--hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \ + --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \ + --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \ + --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \ + --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \ + --hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \ + --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \ + --hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \ + --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \ + --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \ + --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \ + --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \ + --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \ + --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \ + --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \ + --hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4 + # via + # google-cloud-storage + # google-resumable-media +google-resumable-media==2.7.2 \ + --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \ + --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0 + # via google-cloud-storage +googleapis-common-protos==1.66.0 \ + --hash=sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c \ + --hash=sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed + # via google-api-core +idna==3.10 \ + --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ + --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 + # via requests nox==2024.10.9 \ --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 - # via -r synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in + # via -r requirements.in packaging==24.2 \ --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f @@ -32,6 +208,51 @@ platformdirs==4.3.6 \ --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb # via virtualenv +proto-plus==1.25.0 \ + --hash=sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961 \ + --hash=sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91 + # via google-api-core +protobuf==5.29.3 \ + --hash=sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f \ + --hash=sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7 \ + --hash=sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888 \ + --hash=sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620 \ + --hash=sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da \ + --hash=sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252 \ + --hash=sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a \ + 
--hash=sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e \ + --hash=sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107 \ + --hash=sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f \ + --hash=sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84 + # via + # gcp-docuploader + # google-api-core + # googleapis-common-protos + # proto-plus +pyasn1==0.6.1 \ + --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ + --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.4.1 \ + --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ + --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c + # via google-auth +requests==2.32.3 \ + --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ + --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 + # via + # google-api-core + # google-cloud-storage +rsa==4.9 \ + --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ + --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 + # via google-auth +six==1.17.0 \ + --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \ + --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81 + # via gcp-docuploader tomli==2.2.1 \ --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ @@ -66,7 +287,11 @@ tomli==2.2.1 \ --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 # via nox -virtualenv==20.28.0 \ - --hash=sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0 \ - --hash=sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa +urllib3==2.3.0 \ + --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \ + --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d + # via requests +virtualenv==20.28.1 \ + --hash=sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb \ + --hash=sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329 # via nox diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh index 233205d5..4ed4aaf1 100755 --- a/.kokoro/publish-docs.sh +++ b/.kokoro/publish-docs.sh @@ -20,10 +20,6 @@ export PYTHONUNBUFFERED=1 export PATH="${HOME}/.local/bin:${PATH}" -# Install nox -python3.10 -m pip install --require-hashes -r .kokoro/requirements.txt -python3.10 -m nox --version - # build docs nox -s docs diff --git a/README.rst b/README.rst index c5f944dc..dc21a4e8 100644 --- a/README.rst +++ b/README.rst @@ -30,12 +30,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. `Enable the Google Cloud Datastore API.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the Google Cloud Datastore API.: https://cloud.google.com/datastore -.. 
_Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
 
 Installation
 ~~~~~~~~~~~~
@@ -110,3 +110,92 @@ Next Steps
 
 .. _Google Cloud Datastore API Product documentation: https://cloud.google.com/datastore
 .. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst
+
+Logging
+-------
+
+This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
+Note the following:
+
+#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
+#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
+#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
+
+Simple, environment-based configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
+logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
+messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
+event.
+
+A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
+
+- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
+- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
+
+**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
+
+Environment-Based Examples
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Enabling the default handler for all Google-based loggers:
+
+.. code-block:: console
+
+    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
+
+- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
+
+.. code-block:: console
+
+    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
+
+
+Advanced, code-based configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+You can also configure a valid logging scope using Python's standard `logging` mechanism.
+
+Code-Based Examples
+^^^^^^^^^^^^^^^^^^^
+
+- Configuring a handler for all Google-based loggers:
+
+.. code-block:: python
+
+    import logging
+
+    from google.cloud import datastore
+
+    base_logger = logging.getLogger("google")
+    base_logger.addHandler(logging.StreamHandler())
+    base_logger.setLevel(logging.DEBUG)
+
+- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
+
+.. code-block:: python
+
+    import logging
+
+    from google.cloud import datastore
+
+    base_logger = logging.getLogger("google.cloud.library_v1")
+    base_logger.addHandler(logging.StreamHandler())
+    base_logger.setLevel(logging.DEBUG)
+
+Logging details
+~~~~~~~~~~~~~~~
+
+#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
+   logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
+   :code:`logging.getLogger("google").propagate = True` in your code.
+#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
+   one library, but decide you need to also set up environment-based logging configuration for another library.
+
+   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
+      if the code-based configuration gets applied first.
+
+#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
+   executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
+   (This is the reason for 2.i. above.)
diff --git a/renovate.json b/renovate.json
index 39b2a0ec..c7875c46 100644
--- a/renovate.json
+++ b/renovate.json
@@ -5,7 +5,7 @@
     ":preserveSemverRanges",
     ":disableDependencyDashboard"
   ],
-  "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"],
+  "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py", ".github/workflows/unittest.yml"],
   "pip_requirements": {
     "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
   }

From 7be9c4c594af2c2414e394b8bfe62574b58ef337 Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Mon, 3 Mar 2025 09:34:37 -0800
Subject: [PATCH 04/10] chore: Update gapic-generator-python to v1.23.2 (#569)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* chore: Configure Ruby clients for google-ads-ad_manager

PiperOrigin-RevId: 689139590

Source-Link: https://github.com/googleapis/googleapis/commit/296f2ac1aa9abccb7708b639b7839faa1809087f
Source-Link: https://github.com/googleapis/googleapis-gen/commit/26927362e0aa1293258fc23fe3ce83c5c21d5fbb
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMjY5MjczNjJlMGFhMTI5MzI1OGZjMjNmZTNjZTgzYzVjMjFkNWZiYiJ9

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* chore: remove body selector from http rule

PiperOrigin-RevId: 693215877

Source-Link: https://github.com/googleapis/googleapis/commit/bb6b53e326ce2db403d18be7158c265e07948920
Source-Link: https://github.com/googleapis/googleapis-gen/commit/db8b5a93484ad44055b2bacc4c7cf87e970fe0ed
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZGI4YjVhOTM0ODRhZDQ0MDU1YjJiYWNjNGM3Y2Y4N2U5NzBmZTBlZCJ9

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* feat: Add support for opt-in debug logging
fix: Fix typing issue with gRPC metadata when key ends in -bin
chore: Update gapic-generator-python to v1.21.0

PiperOrigin-RevId: 705285820

Source-Link: https://github.com/googleapis/googleapis/commit/f9b8b9150f7fcd600b0acaeef91236b1843f5e49
Source-Link: https://github.com/googleapis/googleapis-gen/commit/ca1e0a1e472d6e6f5de883a5cb54724f112ce348
Copy-Tag: 
eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2ExZTBhMWU0NzJkNmU2ZjVkZTg4M2E1Y2I1NDcyNGYxMTJjZTM0OCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: Add REST Interceptors which support reading metadata feat: Add support for reading selective GAPIC generation methods from service YAML chore: Update gapic-generator-python to v1.22.0 PiperOrigin-RevId: 724026024 Source-Link: https://github.com/googleapis/googleapis/commit/ad9963857109513e77eed153a66264481789109f Source-Link: https://github.com/googleapis/googleapis-gen/commit/e291c4dd1d670eda19998de76f967e1603a48993 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZTI5MWM0ZGQxZDY3MGVkYTE5OTk4ZGU3NmY5NjdlMTYwM2E0ODk5MyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.23.2 PiperOrigin-RevId: 732281673 Source-Link: https://github.com/googleapis/googleapis/commit/2f37e0ad56637325b24f8603284ccb6f05796f9a Source-Link: https://github.com/googleapis/googleapis-gen/commit/016b7538ba5a798f2ae423d4ccd7f82b06cdf6d2 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDE2Yjc1MzhiYTVhNzk4ZjJhZTQyM2Q0Y2NkN2Y4MmIwNmNkZjZkMiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou Co-authored-by: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> --- .../services/datastore_admin/async_client.py | 122 ++- .../services/datastore_admin/client.py | 238 +++-- .../services/datastore_admin/pagers.py | 16 +- .../datastore_admin/transports/grpc.py | 114 +- .../transports/grpc_asyncio.py | 111 +- .../datastore_admin/transports/rest.py | 795 ++++++++++++-- .../services/datastore/async_client.py | 158 ++- .../datastore_v1/services/datastore/client.py | 274 +++-- .../services/datastore/transports/grpc.py | 114 +- .../datastore/transports/grpc_asyncio.py | 113 +- .../services/datastore/transports/rest.py | 991 ++++++++++++++++-- .../test_datastore_admin.py | 174 +-- .../unit/gapic/datastore_v1/test_datastore.py | 200 ++-- 13 files changed, 2820 insertions(+), 600 deletions(-) diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index 0c2572d6..245f8fb1 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging as std_logging from collections import OrderedDict import re from typing import ( @@ -54,6 +55,15 @@ from .transports.grpc_asyncio import DatastoreAdminGrpcAsyncIOTransport from .client import DatastoreAdminClient +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + class DatastoreAdminAsyncClient: """Google Cloud Datastore Admin API @@ -311,6 +321,28 @@ def __init__( client_info=client_info, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.datastore.admin_v1.DatastoreAdminAsyncClient`.", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "credentialsType": None, + }, + ) + async def export_entities( self, request: Optional[Union[datastore_admin.ExportEntitiesRequest, dict]] = None, @@ -321,7 +353,7 @@ async def export_entities( output_url_prefix: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Exports a copy of all or a subset of entities from Google Cloud Datastore to another storage system, such @@ -420,8 +452,10 @@ async def sample_export_entities(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -434,8 +468,9 @@ async def sample_export_entities(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any( - [project_id, labels, entity_filter, output_url_prefix] + flattened_params = [project_id, labels, entity_filter, output_url_prefix] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 ) if request is not None and has_flattened_params: raise ValueError( @@ -506,7 +541,7 @@ async def import_entities( entity_filter: Optional[datastore_admin.EntityFilter] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Imports entities into Google Cloud Datastore. 
Existing entities with the same key are overwritten. The @@ -597,8 +632,10 @@ async def sample_import_entities(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -619,7 +656,10 @@ async def sample_import_entities(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, labels, input_url, entity_filter]) + flattened_params = [project_id, labels, input_url, entity_filter] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -685,7 +725,7 @@ async def create_index( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Creates the specified index. A newly created index's initial state is ``CREATING``. On completion of the returned @@ -739,8 +779,10 @@ async def sample_create_index(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -799,7 +841,7 @@ async def delete_index( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Deletes an existing index. An index can only be deleted if it is in a ``READY`` or ``ERROR`` state. On successful execution of @@ -852,8 +894,10 @@ async def sample_delete_index(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.api_core.operation_async.AsyncOperation: @@ -915,7 +959,7 @@ async def get_index( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> index.Index: r"""Gets an index. @@ -951,8 +995,10 @@ async def sample_get_index(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_admin_v1.types.Index: @@ -1001,7 +1047,7 @@ async def list_indexes( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListIndexesAsyncPager: r"""Lists the indexes that match the specified filters. Datastore uses an eventually consistent query to fetch @@ -1041,8 +1087,10 @@ async def sample_list_indexes(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_admin_v1.services.datastore_admin.pagers.ListIndexesAsyncPager: @@ -1104,7 +1152,7 @@ async def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1115,8 +1163,10 @@ async def list_operations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -1157,7 +1207,7 @@ async def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. 
@@ -1168,8 +1218,10 @@ async def get_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -1210,7 +1262,7 @@ async def delete_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a long-running operation. @@ -1226,8 +1278,10 @@ async def delete_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -1264,7 +1318,7 @@ async def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1279,8 +1333,10 @@ async def cancel_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index ca54ec3c..489995bc 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -14,6 +14,9 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging import os import re from typing import ( @@ -48,6 +51,15 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.datastore_admin_v1.services.datastore_admin import pagers @@ -497,52 +509,45 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. Returns: - bool: True iff client_universe matches the universe in credentials. + bool: True iff the configured universe domain is valid. Raises: - ValueError: when client_universe does not match the universe in credentials. + ValueError: If the configured universe domain is not valid. """ - default_universe = DatastoreAdminClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) + # NOTE (b/349488459): universe validation is disabled until further notice. return True - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. - Raises: - ValueError: If the configured universe domain is not valid. + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DatastoreAdminClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) @property def api_endpoint(self): @@ -648,6 +653,10 @@ def __init__( # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( @@ -713,6 +722,29 @@ def __init__( api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.datastore.admin_v1.DatastoreAdminClient`.", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "credentialsType": None, + }, + ) + def export_entities( self, request: Optional[Union[datastore_admin.ExportEntitiesRequest, dict]] = None, @@ -723,7 +755,7 @@ def export_entities( output_url_prefix: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Exports a copy of all or a subset of entities from Google Cloud Datastore to another storage system, such @@ -822,8 +854,10 @@ def sample_export_entities(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -836,8 +870,9 @@ def sample_export_entities(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any( - [project_id, labels, entity_filter, output_url_prefix] + flattened_params = [project_id, labels, entity_filter, output_url_prefix] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 ) if request is not None and has_flattened_params: raise ValueError( @@ -904,7 +939,7 @@ def import_entities( entity_filter: Optional[datastore_admin.EntityFilter] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Imports entities into Google Cloud Datastore. Existing entities with the same key are overwritten. The @@ -995,8 +1030,10 @@ def sample_import_entities(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1017,7 +1054,10 @@ def sample_import_entities(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, labels, input_url, entity_filter]) + flattened_params = [project_id, labels, input_url, entity_filter] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1079,7 +1119,7 @@ def create_index( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Creates the specified index. A newly created index's initial state is ``CREATING``. On completion of the returned @@ -1133,8 +1173,10 @@ def sample_create_index(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1191,7 +1233,7 @@ def delete_index( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Deletes an existing index. An index can only be deleted if it is in a ``READY`` or ``ERROR`` state. On successful execution of @@ -1244,8 +1286,10 @@ def sample_delete_index(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1305,7 +1349,7 @@ def get_index( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> index.Index: r"""Gets an index. @@ -1341,8 +1385,10 @@ def sample_get_index(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_admin_v1.types.Index: @@ -1389,7 +1435,7 @@ def list_indexes( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListIndexesPager: r"""Lists the indexes that match the specified filters. Datastore uses an eventually consistent query to fetch @@ -1429,8 +1475,10 @@ def sample_list_indexes(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_admin_v1.services.datastore_admin.pagers.ListIndexesPager: @@ -1503,7 +1551,7 @@ def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1514,8 +1562,10 @@ def list_operations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. 
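
The `Sequence[Tuple[str, Union[str, bytes]]]` widening in these signatures follows gRPC's metadata convention: values for ordinary keys are ASCII strings, while keys ending in the `-bin` suffix carry binary values that the transport base64-encodes on the wire. A minimal sketch of a call the new annotation admits (the request values and the `-bin` key are illustrative, not part of the API, and Application Default Credentials are assumed):

from google.cloud.datastore_admin_v1 import DatastoreAdminClient

client = DatastoreAdminClient()  # picks up Application Default Credentials

# Ordinary metadata values stay `str`; a key with the `-bin` suffix
# may now be typed as `bytes`, matching the widened annotation
# Sequence[Tuple[str, Union[str, bytes]]].
indexes = client.list_indexes(
    request={"project_id": "my-project"},  # illustrative project ID
    metadata=[
        ("x-goog-request-params", "project_id=my-project"),  # str value
        ("x-example-trace-bin", b"\x0a\x0b"),  # bytes value for a `-bin` key
    ],
)
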
@@ -1539,16 +1589,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1556,7 +1610,7 @@ def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1567,8 +1621,10 @@ def get_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -1592,16 +1648,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1609,7 +1669,7 @@ def delete_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a long-running operation. @@ -1625,8 +1685,10 @@ def delete_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -1663,7 +1725,7 @@ def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1678,8 +1740,10 @@ def cancel_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py b/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py index dc61026b..4c0fa8a5 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py @@ -67,7 +67,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -81,8 +81,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = datastore_admin.ListIndexesRequest(request) @@ -141,7 +143,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -155,8 +157,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = datastore_admin.ListIndexesRequest(request) diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index 4d08d9c4..49828797 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
#
+import json
+import logging as std_logging
+import pickle
 import warnings
 from typing import Callable, Dict, Optional, Sequence, Tuple, Union

@@ -22,14 +25,92 @@
 import google.auth  # type: ignore
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
 import grpc  # type: ignore
+import proto  # type: ignore

 from google.cloud.datastore_admin_v1.types import datastore_admin
 from google.cloud.datastore_admin_v1.types import index
 from google.longrunning import operations_pb2  # type: ignore
 from .base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO

+try:
+    from google.api_core import client_logging  # type: ignore
+
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
+    def intercept_unary_unary(self, continuation, client_call_details, request):
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+            std_logging.DEBUG
+        )
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra={
+                    "serviceName": "google.datastore.admin.v1.DatastoreAdmin",
+                    "rpcName": client_call_details.method,
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+
+        response = continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = response.trailing_metadata()
+            # Convert gRPC metadata `<class 'grpc.aio._metadata.Metadata'>` to list of tuples
+            metadata = (
+                dict([(k, str(v)) for k, v in response_metadata])
+                if response_metadata
+                else None
+            )
+            result = response.result()
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response for {client_call_details.method}.",
+                extra={
+                    "serviceName": "google.datastore.admin.v1.DatastoreAdmin",
+                    "rpcName": client_call_details.method,
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+

 class DatastoreAdminGrpcTransport(DatastoreAdminTransport):
     """gRPC backend transport for DatastoreAdmin.

@@ -237,7 +318,12 @@ def __init__(
             ],
         )

-        # Wrap messages. This must be done after self._grpc_channel exists
+        self._interceptor = _LoggingClientInterceptor()
+        self._logged_channel = grpc.intercept_channel(
+            self._grpc_channel, self._interceptor
+        )
+
+        # Wrap messages.
This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -301,7 +387,9 @@ def operations_client(self) -> operations_v1.OperationsClient: """ # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) # Return the client from cache. return self._operations_client @@ -334,7 +422,7 @@ def export_entities( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "export_entities" not in self._stubs: - self._stubs["export_entities"] = self.grpc_channel.unary_unary( + self._stubs["export_entities"] = self._logged_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/ExportEntities", request_serializer=datastore_admin.ExportEntitiesRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -366,7 +454,7 @@ def import_entities( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "import_entities" not in self._stubs: - self._stubs["import_entities"] = self.grpc_channel.unary_unary( + self._stubs["import_entities"] = self._logged_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/ImportEntities", request_serializer=datastore_admin.ImportEntitiesRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -406,7 +494,7 @@ def create_index( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_index" not in self._stubs: - self._stubs["create_index"] = self.grpc_channel.unary_unary( + self._stubs["create_index"] = self._logged_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/CreateIndex", request_serializer=datastore_admin.CreateIndexRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -445,7 +533,7 @@ def delete_index( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_index" not in self._stubs: - self._stubs["delete_index"] = self.grpc_channel.unary_unary( + self._stubs["delete_index"] = self._logged_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/DeleteIndex", request_serializer=datastore_admin.DeleteIndexRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -469,7 +557,7 @@ def get_index(self) -> Callable[[datastore_admin.GetIndexRequest], index.Index]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_index" not in self._stubs: - self._stubs["get_index"] = self.grpc_channel.unary_unary( + self._stubs["get_index"] = self._logged_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/GetIndex", request_serializer=datastore_admin.GetIndexRequest.serialize, response_deserializer=index.Index.deserialize, @@ -500,7 +588,7 @@ def list_indexes( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_indexes" not in self._stubs: - self._stubs["list_indexes"] = self.grpc_channel.unary_unary( + self._stubs["list_indexes"] = self._logged_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/ListIndexes", request_serializer=datastore_admin.ListIndexesRequest.serialize, response_deserializer=datastore_admin.ListIndexesResponse.deserialize, @@ -508,7 +596,7 @@ def list_indexes( return self._stubs["list_indexes"] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def delete_operation( @@ -520,7 +608,7 @@ def delete_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + self._stubs["delete_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/DeleteOperation", request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, response_deserializer=None, @@ -537,7 +625,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -554,7 +642,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -573,7 +661,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py index 366878db..f895032d 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py @@ -14,6 +14,9 @@ # limitations under the License. 
#
 import inspect
+import json
+import pickle
+import logging as std_logging
 import warnings
 from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union

@@ -24,8 +27,11 @@
 from google.api_core import operations_v1
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
 import grpc  # type: ignore
+import proto  # type: ignore
 from grpc.experimental import aio  # type: ignore

 from google.cloud.datastore_admin_v1.types import datastore_admin
@@ -34,6 +40,82 @@
 from .base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO
 from .grpc import DatastoreAdminGrpcTransport

+try:
+    from google.api_core import client_logging  # type: ignore
+
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientAIOInterceptor(
+    grpc.aio.UnaryUnaryClientInterceptor
+):  # pragma: NO COVER
+    async def intercept_unary_unary(self, continuation, client_call_details, request):
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+            std_logging.DEBUG
+        )
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra={
+                    "serviceName": "google.datastore.admin.v1.DatastoreAdmin",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = await continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = await response.trailing_metadata()
+            # Convert gRPC metadata `<class 'grpc.aio._metadata.Metadata'>` to list of tuples
+            metadata = (
+                dict([(k, str(v)) for k, v in response_metadata])
+                if response_metadata
+                else None
+            )
+            result = await response
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response to rpc {client_call_details.method}.",
+                extra={
+                    "serviceName": "google.datastore.admin.v1.DatastoreAdmin",
+                    "rpcName": str(client_call_details.method),
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+

 class DatastoreAdminGrpcAsyncIOTransport(DatastoreAdminTransport):
     """gRPC AsyncIO backend transport for DatastoreAdmin.

@@ -284,10 +366,13 @@ def __init__(
             ],
         )

-        # Wrap messages.
This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel self._wrap_with_kind = ( "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters ) + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -310,7 +395,7 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel + self._logged_channel ) # Return the client from cache. @@ -346,7 +431,7 @@ def export_entities( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "export_entities" not in self._stubs: - self._stubs["export_entities"] = self.grpc_channel.unary_unary( + self._stubs["export_entities"] = self._logged_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/ExportEntities", request_serializer=datastore_admin.ExportEntitiesRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -380,7 +465,7 @@ def import_entities( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "import_entities" not in self._stubs: - self._stubs["import_entities"] = self.grpc_channel.unary_unary( + self._stubs["import_entities"] = self._logged_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/ImportEntities", request_serializer=datastore_admin.ImportEntitiesRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -422,7 +507,7 @@ def create_index( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_index" not in self._stubs: - self._stubs["create_index"] = self.grpc_channel.unary_unary( + self._stubs["create_index"] = self._logged_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/CreateIndex", request_serializer=datastore_admin.CreateIndexRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -463,7 +548,7 @@ def delete_index( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_index" not in self._stubs: - self._stubs["delete_index"] = self.grpc_channel.unary_unary( + self._stubs["delete_index"] = self._logged_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/DeleteIndex", request_serializer=datastore_admin.DeleteIndexRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -489,7 +574,7 @@ def get_index( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_index" not in self._stubs: - self._stubs["get_index"] = self.grpc_channel.unary_unary( + self._stubs["get_index"] = self._logged_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/GetIndex", request_serializer=datastore_admin.GetIndexRequest.serialize, response_deserializer=index.Index.deserialize, @@ -521,7 +606,7 @@ def list_indexes( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_indexes" not in self._stubs: - self._stubs["list_indexes"] = self.grpc_channel.unary_unary( + self._stubs["list_indexes"] = self._logged_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/ListIndexes", request_serializer=datastore_admin.ListIndexesRequest.serialize, response_deserializer=datastore_admin.ListIndexesResponse.deserialize, @@ -609,7 +694,7 @@ def _wrap_method(self, func, *args, **kwargs): return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() @property def kind(self) -> str: @@ -625,7 +710,7 @@ def delete_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + self._stubs["delete_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/DeleteOperation", request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, response_deserializer=None, @@ -642,7 +727,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -659,7 +744,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -678,7 +763,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py index 01fcdd85..1cdfd4b9 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py @@ -13,9 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging +import json # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries @@ -45,11 +46,19 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - rest_version=requests_version, + rest_version=f"requests@{requests_version}", ) @@ -125,8 +134,10 @@ def post_list_indexes(self, response): def pre_create_index( self, request: datastore_admin.CreateIndexRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[datastore_admin.CreateIndexRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastore_admin.CreateIndexRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for create_index Override in a subclass to manipulate the request or metadata @@ -139,17 +150,42 @@ def post_create_index( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_index - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_index_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DatastoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_create_index` interceptor runs + before the `post_create_index_with_metadata` interceptor. """ return response + def post_create_index_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_index + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatastoreAdmin server but before it is returned to user code. + + We recommend only using this `post_create_index_with_metadata` + interceptor in new development instead of the `post_create_index` interceptor. + When both interceptors are used, this `post_create_index_with_metadata` interceptor runs after the + `post_create_index` interceptor. The (possibly modified) response returned by + `post_create_index` will be passed to + `post_create_index_with_metadata`. + """ + return response, metadata + def pre_delete_index( self, request: datastore_admin.DeleteIndexRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[datastore_admin.DeleteIndexRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastore_admin.DeleteIndexRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_index Override in a subclass to manipulate the request or metadata @@ -162,17 +198,42 @@ def post_delete_index( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_index - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_index_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the DatastoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_delete_index` interceptor runs + before the `post_delete_index_with_metadata` interceptor. """ return response + def post_delete_index_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_index + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatastoreAdmin server but before it is returned to user code. + + We recommend only using this `post_delete_index_with_metadata` + interceptor in new development instead of the `post_delete_index` interceptor. + When both interceptors are used, this `post_delete_index_with_metadata` interceptor runs after the + `post_delete_index` interceptor. The (possibly modified) response returned by + `post_delete_index` will be passed to + `post_delete_index_with_metadata`. + """ + return response, metadata + def pre_export_entities( self, request: datastore_admin.ExportEntitiesRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[datastore_admin.ExportEntitiesRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastore_admin.ExportEntitiesRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for export_entities Override in a subclass to manipulate the request or metadata @@ -185,17 +246,42 @@ def post_export_entities( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_entities - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_entities_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DatastoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_export_entities` interceptor runs + before the `post_export_entities_with_metadata` interceptor. """ return response + def post_export_entities_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_entities + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatastoreAdmin server but before it is returned to user code. + + We recommend only using this `post_export_entities_with_metadata` + interceptor in new development instead of the `post_export_entities` interceptor. + When both interceptors are used, this `post_export_entities_with_metadata` interceptor runs after the + `post_export_entities` interceptor. The (possibly modified) response returned by + `post_export_entities` will be passed to + `post_export_entities_with_metadata`. 
+ """ + return response, metadata + def pre_get_index( self, request: datastore_admin.GetIndexRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[datastore_admin.GetIndexRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastore_admin.GetIndexRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_index Override in a subclass to manipulate the request or metadata @@ -206,17 +292,40 @@ def pre_get_index( def post_get_index(self, response: index.Index) -> index.Index: """Post-rpc interceptor for get_index - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_index_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DatastoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_get_index` interceptor runs + before the `post_get_index_with_metadata` interceptor. """ return response + def post_get_index_with_metadata( + self, response: index.Index, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[index.Index, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_index + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatastoreAdmin server but before it is returned to user code. + + We recommend only using this `post_get_index_with_metadata` + interceptor in new development instead of the `post_get_index` interceptor. + When both interceptors are used, this `post_get_index_with_metadata` interceptor runs after the + `post_get_index` interceptor. The (possibly modified) response returned by + `post_get_index` will be passed to + `post_get_index_with_metadata`. + """ + return response, metadata + def pre_import_entities( self, request: datastore_admin.ImportEntitiesRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[datastore_admin.ImportEntitiesRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastore_admin.ImportEntitiesRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for import_entities Override in a subclass to manipulate the request or metadata @@ -229,17 +338,42 @@ def post_import_entities( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_entities - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_entities_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DatastoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_import_entities` interceptor runs + before the `post_import_entities_with_metadata` interceptor. """ return response + def post_import_entities_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_entities + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatastoreAdmin server but before it is returned to user code. + + We recommend only using this `post_import_entities_with_metadata` + interceptor in new development instead of the `post_import_entities` interceptor. 
+ When both interceptors are used, this `post_import_entities_with_metadata` interceptor runs after the + `post_import_entities` interceptor. The (possibly modified) response returned by + `post_import_entities` will be passed to + `post_import_entities_with_metadata`. + """ + return response, metadata + def pre_list_indexes( self, request: datastore_admin.ListIndexesRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[datastore_admin.ListIndexesRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastore_admin.ListIndexesRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_indexes Override in a subclass to manipulate the request or metadata @@ -252,17 +386,44 @@ def post_list_indexes( ) -> datastore_admin.ListIndexesResponse: """Post-rpc interceptor for list_indexes - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_indexes_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DatastoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_list_indexes` interceptor runs + before the `post_list_indexes_with_metadata` interceptor. """ return response + def post_list_indexes_with_metadata( + self, + response: datastore_admin.ListIndexesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastore_admin.ListIndexesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_indexes + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatastoreAdmin server but before it is returned to user code. + + We recommend only using this `post_list_indexes_with_metadata` + interceptor in new development instead of the `post_list_indexes` interceptor. + When both interceptors are used, this `post_list_indexes_with_metadata` interceptor runs after the + `post_list_indexes` interceptor. The (possibly modified) response returned by + `post_list_indexes` will be passed to + `post_list_indexes_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for cancel_operation Override in a subclass to manipulate the request or metadata @@ -282,8 +443,10 @@ def post_cancel_operation(self, response: None) -> None: def pre_delete_operation( self, request: operations_pb2.DeleteOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_operation Override in a subclass to manipulate the request or metadata @@ -303,8 +466,10 @@ def post_delete_operation(self, response: None) -> None: def pre_get_operation( self, request: operations_pb2.GetOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -326,8 +491,10 @@ def post_get_operation( def pre_list_operations( self, request: operations_pb2.ListOperationsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_operations Override in a subclass to manipulate the request or metadata @@ -574,7 +741,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the create index method over HTTP. @@ -585,8 +752,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.operations_pb2.Operation: @@ -599,6 +768,7 @@ def __call__( http_options = ( _BaseDatastoreAdminRestTransport._BaseCreateIndex._get_http_options() ) + request, metadata = self._interceptor.pre_create_index(request, metadata) transcoded_request = _BaseDatastoreAdminRestTransport._BaseCreateIndex._get_transcoded_request( http_options, request @@ -613,6 +783,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore.admin_v1.DatastoreAdminClient.CreateIndex", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "CreateIndex", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreAdminRestTransport._CreateIndex._get_response( self._host, @@ -632,7 +829,33 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_index(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_index_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore.admin_v1.DatastoreAdminClient.create_index", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "CreateIndex", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _DeleteIndex( @@ -669,7 +892,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the delete index method over HTTP. @@ -680,8 +903,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.operations_pb2.Operation: @@ -694,6 +919,7 @@ def __call__( http_options = ( _BaseDatastoreAdminRestTransport._BaseDeleteIndex._get_http_options() ) + request, metadata = self._interceptor.pre_delete_index(request, metadata) transcoded_request = _BaseDatastoreAdminRestTransport._BaseDeleteIndex._get_transcoded_request( http_options, request @@ -704,6 +930,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore.admin_v1.DatastoreAdminClient.DeleteIndex", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "DeleteIndex", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreAdminRestTransport._DeleteIndex._get_response( self._host, @@ -722,7 +975,33 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_index(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_index_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore.admin_v1.DatastoreAdminClient.delete_index", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "DeleteIndex", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ExportEntities( @@ -760,7 +1039,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the export entities method over HTTP. @@ -771,8 +1050,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.operations_pb2.Operation: @@ -785,6 +1066,7 @@ def __call__( http_options = ( _BaseDatastoreAdminRestTransport._BaseExportEntities._get_http_options() ) + request, metadata = self._interceptor.pre_export_entities(request, metadata) transcoded_request = _BaseDatastoreAdminRestTransport._BaseExportEntities._get_transcoded_request( http_options, request @@ -799,6 +1081,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore.admin_v1.DatastoreAdminClient.ExportEntities", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "ExportEntities", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreAdminRestTransport._ExportEntities._get_response( self._host, @@ -818,7 +1127,33 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_entities(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_entities_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore.admin_v1.DatastoreAdminClient.export_entities", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "ExportEntities", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetIndex( @@ -855,7 +1190,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> index.Index: r"""Call the get index method over HTTP. @@ -866,8 +1201,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.index.Index: @@ -877,6 +1214,7 @@ def __call__( http_options = ( _BaseDatastoreAdminRestTransport._BaseGetIndex._get_http_options() ) + request, metadata = self._interceptor.pre_get_index(request, metadata) transcoded_request = ( _BaseDatastoreAdminRestTransport._BaseGetIndex._get_transcoded_request( @@ -891,6 +1229,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore.admin_v1.DatastoreAdminClient.GetIndex", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "GetIndex", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreAdminRestTransport._GetIndex._get_response( self._host, @@ -911,7 +1276,33 @@ def __call__( pb_resp = index.Index.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_index(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_index_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = index.Index.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore.admin_v1.DatastoreAdminClient.get_index", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "GetIndex", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ImportEntities( @@ -949,7 +1340,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the import entities method over HTTP. @@ -960,8 +1351,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
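The widened metadata annotation reflects the gRPC metadata convention: values are str unless the key ends in -bin, in which case the value is raw bytes (gRPC base64-encodes binary metadata on the wire). Both entries below are now well-typed; the second key name is made up for illustration:

    metadata = [
        ("x-goog-request-params", "project_id=my-project"),  # ordinary str value
        ("x-example-checksum-bin", b"\x9a\x02\xff"),         # bytes, key ends in -bin
    ]
    # e.g. client.get_index(request, metadata=metadata)  # hypothetical call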
Returns: ~.operations_pb2.Operation: @@ -974,6 +1367,7 @@ def __call__( http_options = ( _BaseDatastoreAdminRestTransport._BaseImportEntities._get_http_options() ) + request, metadata = self._interceptor.pre_import_entities(request, metadata) transcoded_request = _BaseDatastoreAdminRestTransport._BaseImportEntities._get_transcoded_request( http_options, request @@ -988,6 +1382,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore.admin_v1.DatastoreAdminClient.ImportEntities", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "ImportEntities", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreAdminRestTransport._ImportEntities._get_response( self._host, @@ -1007,7 +1428,33 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_import_entities(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_entities_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore.admin_v1.DatastoreAdminClient.import_entities", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "ImportEntities", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListIndexes( @@ -1044,7 +1491,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore_admin.ListIndexesResponse: r"""Call the list indexes method over HTTP. @@ -1055,8 +1502,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
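On the way back, each REST handler parses the raw JSON body directly into a protobuf message with ignore_unknown_fields=True, so a server that has grown new fields does not break an older client. The decode step in isolation:

    from google.longrunning import operations_pb2
    from google.protobuf import json_format

    # A JSON body as the REST server might return it, including a field
    # these protos do not know about.
    body = '{"name": "projects/p/operations/123", "done": true, "newField": 1}'

    op = operations_pb2.Operation()
    json_format.Parse(body, op, ignore_unknown_fields=True)
    assert op.name == "projects/p/operations/123" and op.done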
Returns: ~.datastore_admin.ListIndexesResponse: @@ -1068,6 +1517,7 @@ def __call__( http_options = ( _BaseDatastoreAdminRestTransport._BaseListIndexes._get_http_options() ) + request, metadata = self._interceptor.pre_list_indexes(request, metadata) transcoded_request = _BaseDatastoreAdminRestTransport._BaseListIndexes._get_transcoded_request( http_options, request @@ -1078,6 +1528,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore.admin_v1.DatastoreAdminClient.ListIndexes", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "ListIndexes", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreAdminRestTransport._ListIndexes._get_response( self._host, @@ -1098,7 +1575,35 @@ def __call__( pb_resp = datastore_admin.ListIndexesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_indexes(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_indexes_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = datastore_admin.ListIndexesResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore.admin_v1.DatastoreAdminClient.list_indexes", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "ListIndexes", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -1187,7 +1692,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Call the cancel operation method over HTTP. @@ -1197,13 +1702,16 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" http_options = ( _BaseDatastoreAdminRestTransport._BaseCancelOperation._get_http_options() ) + request, metadata = self._interceptor.pre_cancel_operation( request, metadata ) @@ -1216,6 +1724,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore.admin_v1.DatastoreAdminClient.CancelOperation", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreAdminRestTransport._CancelOperation._get_response( self._host, @@ -1271,7 +1806,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Call the delete operation method over HTTP. @@ -1281,13 +1816,16 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ http_options = ( _BaseDatastoreAdminRestTransport._BaseDeleteOperation._get_http_options() ) + request, metadata = self._interceptor.pre_delete_operation( request, metadata ) @@ -1300,6 +1838,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore.admin_v1.DatastoreAdminClient.DeleteOperation", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreAdminRestTransport._DeleteOperation._get_response( self._host, @@ -1355,7 +1920,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. @@ -1365,8 +1930,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.Operation: Response from GetOperation method. @@ -1375,6 +1942,7 @@ def __call__( http_options = ( _BaseDatastoreAdminRestTransport._BaseGetOperation._get_http_options() ) + request, metadata = self._interceptor.pre_get_operation(request, metadata) transcoded_request = _BaseDatastoreAdminRestTransport._BaseGetOperation._get_transcoded_request( http_options, request @@ -1385,6 +1953,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore.admin_v1.DatastoreAdminClient.GetOperation", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreAdminRestTransport._GetOperation._get_response( self._host, @@ -1404,6 +1999,27 @@ def __call__( resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore.admin_v1.DatastoreAdminAsyncClient.GetOperation", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property @@ -1444,7 +2060,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. @@ -1454,8 +2070,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.ListOperationsResponse: Response from ListOperations method. 
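The extra= dict attached to each of these debug records is not interpolated into the message; the logging module copies it onto the LogRecord, where a structured formatter can pick the fields up. A sketch of such a formatter (logger name illustrative):

    import json
    import logging

    class RpcFormatter(logging.Formatter):
        def format(self, record: logging.LogRecord) -> str:
            return json.dumps(
                {
                    "message": record.getMessage(),
                    # Keys passed via `extra=` surface as record attributes.
                    "serviceName": getattr(record, "serviceName", None),
                    "rpcName": getattr(record, "rpcName", None),
                }
            )

    logger = logging.getLogger("example.rpc")
    handler = logging.StreamHandler()
    handler.setFormatter(RpcFormatter())
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)
    logger.debug(
        "Sending request",
        extra={"serviceName": "google.datastore.v1.Datastore", "rpcName": "GetOperation"},
    )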
@@ -1464,6 +2082,7 @@ def __call__( http_options = ( _BaseDatastoreAdminRestTransport._BaseListOperations._get_http_options() ) + request, metadata = self._interceptor.pre_list_operations(request, metadata) transcoded_request = _BaseDatastoreAdminRestTransport._BaseListOperations._get_transcoded_request( http_options, request @@ -1474,6 +2093,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore.admin_v1.DatastoreAdminClient.ListOperations", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreAdminRestTransport._ListOperations._get_response( self._host, @@ -1493,6 +2139,27 @@ def __call__( resp = operations_pb2.ListOperationsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore.admin_v1.DatastoreAdminAsyncClient.ListOperations", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property diff --git a/google/cloud/datastore_v1/services/datastore/async_client.py b/google/cloud/datastore_v1/services/datastore/async_client.py index fcef7a8b..88de0d08 100644 --- a/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/google/cloud/datastore_v1/services/datastore/async_client.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
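All of the CLIENT_LOGGING_SUPPORTED checks above hinge on a single probe, repeated per module in the hunks that follow: google.api_core.client_logging is imported once at module load, and if the installed google-api-core predates it, structured client logging is silently disabled rather than breaking the import. The pattern in isolation:

    import logging

    try:
        from google.api_core import client_logging  # type: ignore

        CLIENT_LOGGING_SUPPORTED = True
    except ImportError:
        # Older google-api-core: structured client logging is simply unavailable.
        CLIENT_LOGGING_SUPPORTED = False

    _LOGGER = logging.getLogger(__name__)

    if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):
        _LOGGER.debug("Structured client logging is active.")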
# +import logging as std_logging from collections import OrderedDict import re from typing import ( @@ -54,6 +55,15 @@ from .transports.grpc_asyncio import DatastoreGrpcAsyncIOTransport from .client import DatastoreClient +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + class DatastoreAsyncClient: """Each RPC normalizes the partition IDs of the keys in its @@ -258,6 +268,28 @@ def __init__( client_info=client_info, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.datastore_v1.DatastoreAsyncClient`.", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.datastore.v1.Datastore", + "credentialsType": None, + }, + ) + async def lookup( self, request: Optional[Union[datastore.LookupRequest, dict]] = None, @@ -267,7 +299,7 @@ async def lookup( keys: Optional[MutableSequence[entity.Key]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.LookupResponse: r"""Looks up entities by key. @@ -323,8 +355,10 @@ async def sample_lookup(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.LookupResponse: @@ -335,7 +369,10 @@ async def sample_lookup(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, read_options, keys]) + flattened_params = [project_id, read_options, keys] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -393,7 +430,7 @@ async def run_query( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.RunQueryResponse: r"""Queries for entities. @@ -430,8 +467,10 @@ async def sample_run_query(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.RunQueryResponse: @@ -484,7 +523,7 @@ async def run_aggregation_query( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.RunAggregationQueryResponse: r"""Runs an aggregation query. @@ -521,8 +560,10 @@ async def sample_run_aggregation_query(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.RunAggregationQueryResponse: @@ -576,7 +617,7 @@ async def begin_transaction( project_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.BeginTransactionResponse: r"""Begins a new transaction. @@ -620,8 +661,10 @@ async def sample_begin_transaction(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.BeginTransactionResponse: @@ -632,7 +675,10 @@ async def sample_begin_transaction(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id]) + flattened_params = [project_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -692,7 +738,7 @@ async def commit( mutations: Optional[MutableSequence[datastore.Mutation]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.CommitResponse: r"""Commits a transaction, optionally creating, deleting or modifying some entities. 
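The has_flattened_params rewrite in these client hunks is a behavioral fix, not a style change: any([...]) treats falsy-but-present arguments (an empty string, an empty list of mutations) as absent, so the old guard failed to reject them when a request object was also supplied. The new form counts only None as "not provided":

    project_id, mode, transaction, mutations = "", None, None, None

    # Old guard: an explicitly passed empty string looks like "no parameter".
    assert any([project_id, mode, transaction, mutations]) is False

    # New guard: anything that is not None counts as provided.
    flattened_params = [project_id, mode, transaction, mutations]
    has_flattened_params = (
        len([param for param in flattened_params if param is not None]) > 0
    )
    assert has_flattened_params is True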
@@ -773,8 +819,10 @@ async def sample_commit(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.CommitResponse: @@ -785,7 +833,10 @@ async def sample_commit(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, mode, transaction, mutations]) + flattened_params = [project_id, mode, transaction, mutations] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -847,7 +898,7 @@ async def rollback( transaction: Optional[bytes] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.RollbackResponse: r"""Rolls back a transaction. @@ -900,8 +951,10 @@ async def sample_rollback(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.RollbackResponse: @@ -913,7 +966,10 @@ async def sample_rollback(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, transaction]) + flattened_params = [project_id, transaction] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -971,7 +1027,7 @@ async def allocate_ids( keys: Optional[MutableSequence[entity.Key]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.AllocateIdsResponse: r"""Allocates IDs for the given keys, which is useful for referencing an entity before it is inserted. @@ -1025,8 +1081,10 @@ async def sample_allocate_ids(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.AllocateIdsResponse: @@ -1037,7 +1095,10 @@ async def sample_allocate_ids(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, keys]) + flattened_params = [project_id, keys] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1097,7 +1158,7 @@ async def reserve_ids( keys: Optional[MutableSequence[entity.Key]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.ReserveIdsResponse: r"""Prevents the supplied keys' IDs from being auto-allocated by Cloud Datastore. @@ -1150,8 +1211,10 @@ async def sample_reserve_ids(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.ReserveIdsResponse: @@ -1162,7 +1225,10 @@ async def sample_reserve_ids(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, keys]) + flattened_params = [project_id, keys] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1220,7 +1286,7 @@ async def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1231,8 +1297,10 @@ async def list_operations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. 
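The guard exists because GAPIC methods accept two mutually exclusive calling styles, with flattened arguments copied into a fresh request object only when no request is given. A usage sketch (client construction assumes ambient credentials; the project ID is illustrative):

    from google.cloud.datastore_v1 import DatastoreClient
    from google.cloud.datastore_v1.types import datastore

    client = DatastoreClient()

    # Style 1: a fully formed request object.
    response = client.begin_transaction(
        request=datastore.BeginTransactionRequest(project_id="my-project")
    )

    # Style 2: flattened keyword arguments.
    response = client.begin_transaction(project_id="my-project")

    # Mixing the styles raises ValueError:
    #   client.begin_transaction(request=..., project_id="my-project")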
@@ -1273,7 +1341,7 @@ async def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1284,8 +1352,10 @@ async def get_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -1326,7 +1396,7 @@ async def delete_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a long-running operation. @@ -1342,8 +1412,10 @@ async def delete_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -1380,7 +1452,7 @@ async def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1395,8 +1467,10 @@ async def cancel_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ diff --git a/google/cloud/datastore_v1/services/datastore/client.py b/google/cloud/datastore_v1/services/datastore/client.py index ea6fba23..aad9d820 100644 --- a/google/cloud/datastore_v1/services/datastore/client.py +++ b/google/cloud/datastore_v1/services/datastore/client.py @@ -14,6 +14,9 @@ # limitations under the License. 
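client.py below goes further than the async client: RPC failures with HTTP status 401, 403, or 404 get credential diagnostics appended to their error details via the new _add_cred_info_for_auth_errors helper. Its core, as a standalone sketch:

    import json
    from http import HTTPStatus

    from google.api_core import exceptions as core_exceptions

    _AUTH_CODES = (HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND)

    def add_cred_info(error: core_exceptions.GoogleAPICallError, credentials) -> None:
        if error.code not in _AUTH_CODES:
            return
        # get_cred_info only exists on google-auth >= 2.35.0 credentials.
        get_info = getattr(credentials, "get_cred_info", None)
        if get_info is None:
            return
        cred_info = get_info()
        if cred_info and hasattr(error._details, "append"):
            error._details.append(json.dumps(cred_info))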
# from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging import os import re from typing import ( @@ -48,6 +51,15 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.cloud.datastore_v1.types import aggregation_result from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity @@ -448,52 +460,45 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. Returns: - bool: True iff client_universe matches the universe in credentials. + bool: True iff the configured universe domain is valid. Raises: - ValueError: when client_universe does not match the universe in credentials. + ValueError: If the configured universe domain is not valid. """ - default_universe = DatastoreClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) + # NOTE (b/349488459): universe validation is disabled until further notice. return True - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. - Raises: - ValueError: If the configured universe domain is not valid. + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DatastoreClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) @property def api_endpoint(self): @@ -599,6 +604,10 @@ def __init__( # Initialize the universe domain validation. 
self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( @@ -661,6 +670,29 @@ def __init__( api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.datastore_v1.DatastoreClient`.", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.datastore.v1.Datastore", + "credentialsType": None, + }, + ) + def lookup( self, request: Optional[Union[datastore.LookupRequest, dict]] = None, @@ -670,7 +702,7 @@ def lookup( keys: Optional[MutableSequence[entity.Key]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.LookupResponse: r"""Looks up entities by key. @@ -726,8 +758,10 @@ def sample_lookup(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.LookupResponse: @@ -738,7 +772,10 @@ def sample_lookup(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, read_options, keys]) + flattened_params = [project_id, read_options, keys] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -795,7 +832,7 @@ def run_query( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.RunQueryResponse: r"""Queries for entities. @@ -832,8 +869,10 @@ def sample_run_query(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.RunQueryResponse: @@ -884,7 +923,7 @@ def run_aggregation_query( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.RunAggregationQueryResponse: r"""Runs an aggregation query. @@ -921,8 +960,10 @@ def sample_run_aggregation_query(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.RunAggregationQueryResponse: @@ -974,7 +1015,7 @@ def begin_transaction( project_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.BeginTransactionResponse: r"""Begins a new transaction. @@ -1018,8 +1059,10 @@ def sample_begin_transaction(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.BeginTransactionResponse: @@ -1030,7 +1073,10 @@ def sample_begin_transaction(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id]) + flattened_params = [project_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1087,7 +1133,7 @@ def commit( mutations: Optional[MutableSequence[datastore.Mutation]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.CommitResponse: r"""Commits a transaction, optionally creating, deleting or modifying some entities. @@ -1168,8 +1214,10 @@ def sample_commit(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.CommitResponse: @@ -1180,7 +1228,10 @@ def sample_commit(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, mode, transaction, mutations]) + flattened_params = [project_id, mode, transaction, mutations] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1241,7 +1292,7 @@ def rollback( transaction: Optional[bytes] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.RollbackResponse: r"""Rolls back a transaction. @@ -1294,8 +1345,10 @@ def sample_rollback(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.RollbackResponse: @@ -1307,7 +1360,10 @@ def sample_rollback(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, transaction]) + flattened_params = [project_id, transaction] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1364,7 +1420,7 @@ def allocate_ids( keys: Optional[MutableSequence[entity.Key]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.AllocateIdsResponse: r"""Allocates IDs for the given keys, which is useful for referencing an entity before it is inserted. @@ -1418,8 +1474,10 @@ def sample_allocate_ids(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.datastore_v1.types.AllocateIdsResponse: @@ -1430,7 +1488,10 @@ def sample_allocate_ids(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, keys]) + flattened_params = [project_id, keys] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1487,7 +1548,7 @@ def reserve_ids( keys: Optional[MutableSequence[entity.Key]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.ReserveIdsResponse: r"""Prevents the supplied keys' IDs from being auto-allocated by Cloud Datastore. @@ -1540,8 +1601,10 @@ def sample_reserve_ids(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.ReserveIdsResponse: @@ -1552,7 +1615,10 @@ def sample_reserve_ids(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, keys]) + flattened_params = [project_id, keys] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1620,7 +1686,7 @@ def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1631,8 +1697,10 @@ def list_operations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -1656,16 +1724,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1673,7 +1745,7 @@ def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1684,8 +1756,10 @@ def get_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -1709,16 +1783,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1726,7 +1804,7 @@ def delete_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a long-running operation. @@ -1742,8 +1820,10 @@ def delete_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -1780,7 +1860,7 @@ def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1795,8 +1875,10 @@ def cancel_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
None
"""
diff --git a/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/google/cloud/datastore_v1/services/datastore/transports/grpc.py
index 620576e2..6306c219 100644
--- a/google/cloud/datastore_v1/services/datastore/transports/grpc.py
+++ b/google/cloud/datastore_v1/services/datastore/transports/grpc.py
@@ -13,6 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import json
+import logging as std_logging
+import pickle
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
@@ -21,13 +24,91 @@
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
import grpc # type: ignore
+import proto # type: ignore
from google.cloud.datastore_v1.types import datastore
from google.longrunning import operations_pb2 # type: ignore
from .base import DatastoreTransport, DEFAULT_CLIENT_INFO
+try:
+ from google.api_core import client_logging # type: ignore
+
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER
+ def intercept_unary_unary(self, continuation, client_call_details, request):
+ logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ std_logging.DEBUG
+ )
+ if logging_enabled: # pragma: NO COVER
+ request_metadata = client_call_details.metadata
+ if isinstance(request, proto.Message):
+ request_payload = type(request).to_json(request)
+ elif isinstance(request, google.protobuf.message.Message):
+ request_payload = MessageToJson(request)
+ else:
+ request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+ request_metadata = {
+ key: value.decode("utf-8") if isinstance(value, bytes) else value
+ for key, value in request_metadata
+ }
+ grpc_request = {
+ "payload": request_payload,
+ "requestMethod": "grpc",
+ "metadata": dict(request_metadata),
+ }
+ _LOGGER.debug(
+ f"Sending request for {client_call_details.method}",
+ extra={
+ "serviceName": "google.datastore.v1.Datastore",
+ "rpcName": client_call_details.method,
+ "request": grpc_request,
+ "metadata": grpc_request["metadata"],
+ },
+ )
+
+ response = continuation(client_call_details, request)
+ if logging_enabled: # pragma: NO COVER
+ response_metadata = response.trailing_metadata()
+ # Convert gRPC metadata to a list of tuples
+ metadata = (
+ dict([(k, str(v)) for k, v in response_metadata])
+ if response_metadata
+ else None
+ )
+ result = response.result()
+ if isinstance(result, proto.Message):
+ response_payload = type(result).to_json(result)
+ elif isinstance(result, google.protobuf.message.Message):
+ response_payload = MessageToJson(result)
+ else:
+ response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+ grpc_response = {
+ "payload": response_payload,
+ "metadata": metadata,
+ "status": "OK",
+ }
+ _LOGGER.debug(
+ f"Received response for {client_call_details.method}.",
+ extra={
+ "serviceName": "google.datastore.v1.Datastore",
+ "rpcName": client_call_details.method,
+ "response": grpc_response,
+ "metadata":
grpc_response["metadata"], + }, + ) + return response + class DatastoreGrpcTransport(DatastoreTransport): """gRPC backend transport for Datastore. @@ -187,7 +268,12 @@ def __init__( ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -259,7 +345,7 @@ def lookup(self) -> Callable[[datastore.LookupRequest], datastore.LookupResponse # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "lookup" not in self._stubs: - self._stubs["lookup"] = self.grpc_channel.unary_unary( + self._stubs["lookup"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/Lookup", request_serializer=datastore.LookupRequest.serialize, response_deserializer=datastore.LookupResponse.deserialize, @@ -285,7 +371,7 @@ def run_query( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "run_query" not in self._stubs: - self._stubs["run_query"] = self.grpc_channel.unary_unary( + self._stubs["run_query"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/RunQuery", request_serializer=datastore.RunQueryRequest.serialize, response_deserializer=datastore.RunQueryResponse.deserialize, @@ -313,7 +399,7 @@ def run_aggregation_query( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "run_aggregation_query" not in self._stubs: - self._stubs["run_aggregation_query"] = self.grpc_channel.unary_unary( + self._stubs["run_aggregation_query"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/RunAggregationQuery", request_serializer=datastore.RunAggregationQueryRequest.serialize, response_deserializer=datastore.RunAggregationQueryResponse.deserialize, @@ -341,7 +427,7 @@ def begin_transaction( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "begin_transaction" not in self._stubs: - self._stubs["begin_transaction"] = self.grpc_channel.unary_unary( + self._stubs["begin_transaction"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/BeginTransaction", request_serializer=datastore.BeginTransactionRequest.serialize, response_deserializer=datastore.BeginTransactionResponse.deserialize, @@ -366,7 +452,7 @@ def commit(self) -> Callable[[datastore.CommitRequest], datastore.CommitResponse # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "commit" not in self._stubs: - self._stubs["commit"] = self.grpc_channel.unary_unary( + self._stubs["commit"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/Commit", request_serializer=datastore.CommitRequest.serialize, response_deserializer=datastore.CommitResponse.deserialize, @@ -392,7 +478,7 @@ def rollback( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "rollback" not in self._stubs: - self._stubs["rollback"] = self.grpc_channel.unary_unary( + self._stubs["rollback"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/Rollback", request_serializer=datastore.RollbackRequest.serialize, response_deserializer=datastore.RollbackResponse.deserialize, @@ -419,7 +505,7 @@ def allocate_ids( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "allocate_ids" not in self._stubs: - self._stubs["allocate_ids"] = self.grpc_channel.unary_unary( + self._stubs["allocate_ids"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/AllocateIds", request_serializer=datastore.AllocateIdsRequest.serialize, response_deserializer=datastore.AllocateIdsResponse.deserialize, @@ -446,7 +532,7 @@ def reserve_ids( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "reserve_ids" not in self._stubs: - self._stubs["reserve_ids"] = self.grpc_channel.unary_unary( + self._stubs["reserve_ids"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/ReserveIds", request_serializer=datastore.ReserveIdsRequest.serialize, response_deserializer=datastore.ReserveIdsResponse.deserialize, @@ -454,7 +540,7 @@ def reserve_ids( return self._stubs["reserve_ids"] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def delete_operation( @@ -466,7 +552,7 @@ def delete_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + self._stubs["delete_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/DeleteOperation", request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, response_deserializer=None, @@ -483,7 +569,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -500,7 +586,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -519,7 +605,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py b/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py index 4d943696..aba046cb 100644 --- a/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py +++ b/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py @@ -14,6 +14,9 @@ # limitations under the License. # import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -23,8 +26,11 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.datastore_v1.types import datastore @@ -32,6 +38,82 @@ from .base import DatastoreTransport, DEFAULT_CLIENT_INFO from .grpc import DatastoreGrpcTransport +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + 
"payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class DatastoreGrpcAsyncIOTransport(DatastoreTransport): """gRPC AsyncIO backend transport for Datastore. @@ -234,10 +316,13 @@ def __init__( ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel self._wrap_with_kind = ( "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters ) + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -269,7 +354,7 @@ def lookup( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "lookup" not in self._stubs: - self._stubs["lookup"] = self.grpc_channel.unary_unary( + self._stubs["lookup"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/Lookup", request_serializer=datastore.LookupRequest.serialize, response_deserializer=datastore.LookupResponse.deserialize, @@ -295,7 +380,7 @@ def run_query( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "run_query" not in self._stubs: - self._stubs["run_query"] = self.grpc_channel.unary_unary( + self._stubs["run_query"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/RunQuery", request_serializer=datastore.RunQueryRequest.serialize, response_deserializer=datastore.RunQueryResponse.deserialize, @@ -324,7 +409,7 @@ def run_aggregation_query( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "run_aggregation_query" not in self._stubs: - self._stubs["run_aggregation_query"] = self.grpc_channel.unary_unary( + self._stubs["run_aggregation_query"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/RunAggregationQuery", request_serializer=datastore.RunAggregationQueryRequest.serialize, response_deserializer=datastore.RunAggregationQueryResponse.deserialize, @@ -353,7 +438,7 @@ def begin_transaction( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "begin_transaction" not in self._stubs: - self._stubs["begin_transaction"] = self.grpc_channel.unary_unary( + self._stubs["begin_transaction"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/BeginTransaction", request_serializer=datastore.BeginTransactionRequest.serialize, response_deserializer=datastore.BeginTransactionResponse.deserialize, @@ -380,7 +465,7 @@ def commit( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "commit" not in self._stubs: - self._stubs["commit"] = self.grpc_channel.unary_unary( + self._stubs["commit"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/Commit", request_serializer=datastore.CommitRequest.serialize, response_deserializer=datastore.CommitResponse.deserialize, @@ -406,7 +491,7 @@ def rollback( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "rollback" not in self._stubs: - self._stubs["rollback"] = self.grpc_channel.unary_unary( + self._stubs["rollback"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/Rollback", request_serializer=datastore.RollbackRequest.serialize, response_deserializer=datastore.RollbackResponse.deserialize, @@ -435,7 +520,7 @@ def allocate_ids( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "allocate_ids" not in self._stubs: - self._stubs["allocate_ids"] = self.grpc_channel.unary_unary( + self._stubs["allocate_ids"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/AllocateIds", request_serializer=datastore.AllocateIdsRequest.serialize, response_deserializer=datastore.AllocateIdsResponse.deserialize, @@ -464,7 +549,7 @@ def reserve_ids( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "reserve_ids" not in self._stubs: - self._stubs["reserve_ids"] = self.grpc_channel.unary_unary( + self._stubs["reserve_ids"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/ReserveIds", request_serializer=datastore.ReserveIdsRequest.serialize, response_deserializer=datastore.ReserveIdsResponse.deserialize, @@ -582,7 +667,7 @@ def _wrap_method(self, func, *args, **kwargs): return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() @property def kind(self) -> str: @@ -598,7 +683,7 @@ def delete_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + self._stubs["delete_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/DeleteOperation", request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, response_deserializer=None, @@ -615,7 +700,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -632,7 +717,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -651,7 +736,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/google/cloud/datastore_v1/services/datastore/transports/rest.py b/google/cloud/datastore_v1/services/datastore/transports/rest.py index abb7d45c..5bd89407 100644 --- a/google/cloud/datastore_v1/services/datastore/transports/rest.py +++ b/google/cloud/datastore_v1/services/datastore/transports/rest.py @@ -13,9 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import logging +import json # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries @@ -43,11 +44,19 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - rest_version=requests_version, + rest_version=f"requests@{requests_version}", ) @@ -137,8 +146,10 @@ def post_run_query(self, response): """ def pre_allocate_ids( - self, request: datastore.AllocateIdsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[datastore.AllocateIdsRequest, Sequence[Tuple[str, str]]]: + self, + request: datastore.AllocateIdsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastore.AllocateIdsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for allocate_ids Override in a subclass to manipulate the request or metadata @@ -151,17 +162,42 @@ def post_allocate_ids( ) -> datastore.AllocateIdsResponse: """Post-rpc interceptor for allocate_ids - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_allocate_ids_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastore server but before - it is returned to user code. + it is returned to user code. This `post_allocate_ids` interceptor runs + before the `post_allocate_ids_with_metadata` interceptor. """ return response + def post_allocate_ids_with_metadata( + self, + response: datastore.AllocateIdsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastore.AllocateIdsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for allocate_ids + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastore server but before it is returned to user code. + + We recommend only using this `post_allocate_ids_with_metadata` + interceptor in new development instead of the `post_allocate_ids` interceptor. + When both interceptors are used, this `post_allocate_ids_with_metadata` interceptor runs after the + `post_allocate_ids` interceptor. 
The (possibly modified) response returned by + `post_allocate_ids` will be passed to + `post_allocate_ids_with_metadata`. + """ + return response, metadata + def pre_begin_transaction( self, request: datastore.BeginTransactionRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[datastore.BeginTransactionRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastore.BeginTransactionRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for begin_transaction Override in a subclass to manipulate the request or metadata @@ -174,15 +210,42 @@ def post_begin_transaction( ) -> datastore.BeginTransactionResponse: """Post-rpc interceptor for begin_transaction - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_begin_transaction_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastore server but before - it is returned to user code. + it is returned to user code. This `post_begin_transaction` interceptor runs + before the `post_begin_transaction_with_metadata` interceptor. """ return response + def post_begin_transaction_with_metadata( + self, + response: datastore.BeginTransactionResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastore.BeginTransactionResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for begin_transaction + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastore server but before it is returned to user code. + + We recommend only using this `post_begin_transaction_with_metadata` + interceptor in new development instead of the `post_begin_transaction` interceptor. + When both interceptors are used, this `post_begin_transaction_with_metadata` interceptor runs after the + `post_begin_transaction` interceptor. The (possibly modified) response returned by + `post_begin_transaction` will be passed to + `post_begin_transaction_with_metadata`. + """ + return response, metadata + def pre_commit( - self, request: datastore.CommitRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[datastore.CommitRequest, Sequence[Tuple[str, str]]]: + self, + request: datastore.CommitRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastore.CommitRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for commit Override in a subclass to manipulate the request or metadata @@ -195,15 +258,40 @@ def post_commit( ) -> datastore.CommitResponse: """Post-rpc interceptor for commit - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_commit_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastore server but before - it is returned to user code. + it is returned to user code. This `post_commit` interceptor runs + before the `post_commit_with_metadata` interceptor. """ return response + def post_commit_with_metadata( + self, + response: datastore.CommitResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastore.CommitResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for commit + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastore server but before it is returned to user code. 
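
Taken together, each `post_*_with_metadata` hook hands a subclass both the decoded response and the HTTP response headers, while the legacy `post_*` hook still runs first. A sketch of an override against the interceptor class in this file; the class name `DatastoreRestInterceptor` and the inspected header are assumptions for illustration:

    class AuditingInterceptor(DatastoreRestInterceptor):
        def post_commit_with_metadata(self, response, metadata):
            # `metadata` is the list of (header, value) pairs the transport
            # builds from the HTTP response; inspect it, then pass both through.
            headers = dict(metadata)
            print("commit served at:", headers.get("date"))
            return response, metadata
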
+ + We recommend only using this `post_commit_with_metadata` + interceptor in new development instead of the `post_commit` interceptor. + When both interceptors are used, this `post_commit_with_metadata` interceptor runs after the + `post_commit` interceptor. The (possibly modified) response returned by + `post_commit` will be passed to + `post_commit_with_metadata`. + """ + return response, metadata + def pre_lookup( - self, request: datastore.LookupRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[datastore.LookupRequest, Sequence[Tuple[str, str]]]: + self, + request: datastore.LookupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastore.LookupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for lookup Override in a subclass to manipulate the request or metadata @@ -216,15 +304,40 @@ def post_lookup( ) -> datastore.LookupResponse: """Post-rpc interceptor for lookup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_lookup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastore server but before - it is returned to user code. + it is returned to user code. This `post_lookup` interceptor runs + before the `post_lookup_with_metadata` interceptor. """ return response + def post_lookup_with_metadata( + self, + response: datastore.LookupResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastore.LookupResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for lookup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastore server but before it is returned to user code. + + We recommend only using this `post_lookup_with_metadata` + interceptor in new development instead of the `post_lookup` interceptor. + When both interceptors are used, this `post_lookup_with_metadata` interceptor runs after the + `post_lookup` interceptor. The (possibly modified) response returned by + `post_lookup` will be passed to + `post_lookup_with_metadata`. + """ + return response, metadata + def pre_reserve_ids( - self, request: datastore.ReserveIdsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[datastore.ReserveIdsRequest, Sequence[Tuple[str, str]]]: + self, + request: datastore.ReserveIdsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastore.ReserveIdsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for reserve_ids Override in a subclass to manipulate the request or metadata @@ -237,15 +350,40 @@ def post_reserve_ids( ) -> datastore.ReserveIdsResponse: """Post-rpc interceptor for reserve_ids - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_reserve_ids_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastore server but before - it is returned to user code. + it is returned to user code. This `post_reserve_ids` interceptor runs + before the `post_reserve_ids_with_metadata` interceptor. 
""" return response + def post_reserve_ids_with_metadata( + self, + response: datastore.ReserveIdsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastore.ReserveIdsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for reserve_ids + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastore server but before it is returned to user code. + + We recommend only using this `post_reserve_ids_with_metadata` + interceptor in new development instead of the `post_reserve_ids` interceptor. + When both interceptors are used, this `post_reserve_ids_with_metadata` interceptor runs after the + `post_reserve_ids` interceptor. The (possibly modified) response returned by + `post_reserve_ids` will be passed to + `post_reserve_ids_with_metadata`. + """ + return response, metadata + def pre_rollback( - self, request: datastore.RollbackRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[datastore.RollbackRequest, Sequence[Tuple[str, str]]]: + self, + request: datastore.RollbackRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastore.RollbackRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for rollback Override in a subclass to manipulate the request or metadata @@ -258,17 +396,42 @@ def post_rollback( ) -> datastore.RollbackResponse: """Post-rpc interceptor for rollback - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_rollback_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastore server but before - it is returned to user code. + it is returned to user code. This `post_rollback` interceptor runs + before the `post_rollback_with_metadata` interceptor. """ return response + def post_rollback_with_metadata( + self, + response: datastore.RollbackResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastore.RollbackResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for rollback + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastore server but before it is returned to user code. + + We recommend only using this `post_rollback_with_metadata` + interceptor in new development instead of the `post_rollback` interceptor. + When both interceptors are used, this `post_rollback_with_metadata` interceptor runs after the + `post_rollback` interceptor. The (possibly modified) response returned by + `post_rollback` will be passed to + `post_rollback_with_metadata`. + """ + return response, metadata + def pre_run_aggregation_query( self, request: datastore.RunAggregationQueryRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[datastore.RunAggregationQueryRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastore.RunAggregationQueryRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for run_aggregation_query Override in a subclass to manipulate the request or metadata @@ -281,15 +444,42 @@ def post_run_aggregation_query( ) -> datastore.RunAggregationQueryResponse: """Post-rpc interceptor for run_aggregation_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_run_aggregation_query_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Datastore server but before - it is returned to user code. + it is returned to user code. This `post_run_aggregation_query` interceptor runs + before the `post_run_aggregation_query_with_metadata` interceptor. """ return response + def post_run_aggregation_query_with_metadata( + self, + response: datastore.RunAggregationQueryResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastore.RunAggregationQueryResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for run_aggregation_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastore server but before it is returned to user code. + + We recommend only using this `post_run_aggregation_query_with_metadata` + interceptor in new development instead of the `post_run_aggregation_query` interceptor. + When both interceptors are used, this `post_run_aggregation_query_with_metadata` interceptor runs after the + `post_run_aggregation_query` interceptor. The (possibly modified) response returned by + `post_run_aggregation_query` will be passed to + `post_run_aggregation_query_with_metadata`. + """ + return response, metadata + def pre_run_query( - self, request: datastore.RunQueryRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[datastore.RunQueryRequest, Sequence[Tuple[str, str]]]: + self, + request: datastore.RunQueryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastore.RunQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for run_query Override in a subclass to manipulate the request or metadata @@ -302,17 +492,42 @@ def post_run_query( ) -> datastore.RunQueryResponse: """Post-rpc interceptor for run_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_run_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastore server but before - it is returned to user code. + it is returned to user code. This `post_run_query` interceptor runs + before the `post_run_query_with_metadata` interceptor. """ return response + def post_run_query_with_metadata( + self, + response: datastore.RunQueryResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastore.RunQueryResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for run_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastore server but before it is returned to user code. + + We recommend only using this `post_run_query_with_metadata` + interceptor in new development instead of the `post_run_query` interceptor. + When both interceptors are used, this `post_run_query_with_metadata` interceptor runs after the + `post_run_query` interceptor. The (possibly modified) response returned by + `post_run_query` will be passed to + `post_run_query_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for cancel_operation Override in a subclass to manipulate the request or metadata @@ -332,8 +547,10 @@ def post_cancel_operation(self, response: None) -> None: def pre_delete_operation( self, request: operations_pb2.DeleteOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_operation Override in a subclass to manipulate the request or metadata @@ -353,8 +570,10 @@ def post_delete_operation(self, response: None) -> None: def pre_get_operation( self, request: operations_pb2.GetOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -376,8 +595,10 @@ def post_get_operation( def pre_list_operations( self, request: operations_pb2.ListOperationsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_operations Override in a subclass to manipulate the request or metadata @@ -522,7 +743,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.AllocateIdsResponse: r"""Call the allocate ids method over HTTP. @@ -533,8 +754,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.datastore.AllocateIdsResponse: @@ -546,6 +769,7 @@ def __call__( http_options = ( _BaseDatastoreRestTransport._BaseAllocateIds._get_http_options() ) + request, metadata = self._interceptor.pre_allocate_ids(request, metadata) transcoded_request = ( _BaseDatastoreRestTransport._BaseAllocateIds._get_transcoded_request( @@ -564,6 +788,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore_v1.DatastoreClient.AllocateIds", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "AllocateIds", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreRestTransport._AllocateIds._get_response( self._host, @@ -585,7 +836,33 @@ def __call__( pb_resp = datastore.AllocateIdsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_allocate_ids(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_allocate_ids_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = datastore.AllocateIdsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore_v1.DatastoreClient.allocate_ids", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "AllocateIds", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _BeginTransaction( @@ -623,7 +900,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.BeginTransactionResponse: r"""Call the begin transaction method over HTTP. @@ -634,8 +911,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
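
In the debug records above, the interesting data rides in `extra` rather than in the message string, so a default console handler prints only the one-line message. Surfacing the structured fields takes a formatter that reads them back off the record; a stdlib-only sketch:

    import logging


    class RpcFormatter(logging.Formatter):
        def format(self, record):
            base = super().format(record)
            # Keys passed via `extra=` become plain attributes on the record.
            rpc = getattr(record, "rpcName", None)
            http = getattr(record, "httpRequest", None)
            return f"{base} rpcName={rpc} httpRequest={http}"


    handler = logging.StreamHandler()
    handler.setFormatter(RpcFormatter("%(levelname)s %(name)s: %(message)s"))
    logging.getLogger("google.cloud.datastore_v1").addHandler(handler)
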
Returns: ~.datastore.BeginTransactionResponse: @@ -647,6 +926,7 @@ def __call__( http_options = ( _BaseDatastoreRestTransport._BaseBeginTransaction._get_http_options() ) + request, metadata = self._interceptor.pre_begin_transaction( request, metadata ) @@ -663,6 +943,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore_v1.DatastoreClient.BeginTransaction", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "BeginTransaction", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreRestTransport._BeginTransaction._get_response( self._host, @@ -684,7 +991,35 @@ def __call__( pb_resp = datastore.BeginTransactionResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_begin_transaction(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_begin_transaction_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = datastore.BeginTransactionResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore_v1.DatastoreClient.begin_transaction", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "BeginTransaction", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _Commit(_BaseDatastoreRestTransport._BaseCommit, DatastoreRestStub): @@ -720,7 +1055,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.CommitResponse: r"""Call the commit method over HTTP. @@ -731,8 +1066,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.datastore.CommitResponse: @@ -742,6 +1079,7 @@ def __call__( """ http_options = _BaseDatastoreRestTransport._BaseCommit._get_http_options() + request, metadata = self._interceptor.pre_commit(request, metadata) transcoded_request = ( _BaseDatastoreRestTransport._BaseCommit._get_transcoded_request( @@ -760,6 +1098,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore_v1.DatastoreClient.Commit", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "Commit", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreRestTransport._Commit._get_response( self._host, @@ -781,7 +1146,33 @@ def __call__( pb_resp = datastore.CommitResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_commit(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_commit_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = datastore.CommitResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore_v1.DatastoreClient.commit", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "Commit", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _Lookup(_BaseDatastoreRestTransport._BaseLookup, DatastoreRestStub): @@ -817,7 +1208,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.LookupResponse: r"""Call the lookup method over HTTP. @@ -828,8 +1219,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
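
The `pre_*` hooks shown earlier receive this same widened metadata sequence before the request is transcoded, which makes them the natural place to attach per-call headers; because the incoming sequence may be a tuple, copy it before extending. A sketch against the same assumed interceptor class as above, with an invented header:

    class TaggingInterceptor(DatastoreRestInterceptor):
        def pre_lookup(self, request, metadata):
            # Copy first: the default value is an immutable tuple.
            metadata = list(metadata) + [("x-example-caller", "reporting-job")]
            return request, metadata
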
Returns: ~.datastore.LookupResponse: @@ -839,6 +1232,7 @@ def __call__( """ http_options = _BaseDatastoreRestTransport._BaseLookup._get_http_options() + request, metadata = self._interceptor.pre_lookup(request, metadata) transcoded_request = ( _BaseDatastoreRestTransport._BaseLookup._get_transcoded_request( @@ -857,6 +1251,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore_v1.DatastoreClient.Lookup", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "Lookup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreRestTransport._Lookup._get_response( self._host, @@ -878,7 +1299,33 @@ def __call__( pb_resp = datastore.LookupResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_lookup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_lookup_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = datastore.LookupResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore_v1.DatastoreClient.lookup", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "Lookup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ReserveIds(_BaseDatastoreRestTransport._BaseReserveIds, DatastoreRestStub): @@ -914,7 +1361,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.ReserveIdsResponse: r"""Call the reserve ids method over HTTP. @@ -925,8 +1372,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.datastore.ReserveIdsResponse: @@ -938,6 +1387,7 @@ def __call__( http_options = ( _BaseDatastoreRestTransport._BaseReserveIds._get_http_options() ) + request, metadata = self._interceptor.pre_reserve_ids(request, metadata) transcoded_request = ( _BaseDatastoreRestTransport._BaseReserveIds._get_transcoded_request( @@ -956,6 +1406,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore_v1.DatastoreClient.ReserveIds", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "ReserveIds", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreRestTransport._ReserveIds._get_response( self._host, @@ -977,7 +1454,33 @@ def __call__( pb_resp = datastore.ReserveIdsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_reserve_ids(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_reserve_ids_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = datastore.ReserveIdsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore_v1.DatastoreClient.reserve_ids", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "ReserveIds", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _Rollback(_BaseDatastoreRestTransport._BaseRollback, DatastoreRestStub): @@ -1013,7 +1516,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.RollbackResponse: r"""Call the rollback method over HTTP. @@ -1024,8 +1527,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.datastore.RollbackResponse: @@ -1036,6 +1541,7 @@ def __call__( """ http_options = _BaseDatastoreRestTransport._BaseRollback._get_http_options() + request, metadata = self._interceptor.pre_rollback(request, metadata) transcoded_request = ( _BaseDatastoreRestTransport._BaseRollback._get_transcoded_request( @@ -1054,6 +1560,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore_v1.DatastoreClient.Rollback", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "Rollback", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreRestTransport._Rollback._get_response( self._host, @@ -1075,7 +1608,33 @@ def __call__( pb_resp = datastore.RollbackResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_rollback(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_rollback_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = datastore.RollbackResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore_v1.DatastoreClient.rollback", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "Rollback", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _RunAggregationQuery( @@ -1113,7 +1672,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.RunAggregationQueryResponse: r"""Call the run aggregation query method over HTTP. @@ -1124,8 +1683,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.datastore.RunAggregationQueryResponse: @@ -1137,6 +1698,7 @@ def __call__( http_options = ( _BaseDatastoreRestTransport._BaseRunAggregationQuery._get_http_options() ) + request, metadata = self._interceptor.pre_run_aggregation_query( request, metadata ) @@ -1153,6 +1715,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore_v1.DatastoreClient.RunAggregationQuery", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "RunAggregationQuery", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreRestTransport._RunAggregationQuery._get_response( self._host, @@ -1174,7 +1763,35 @@ def __call__( pb_resp = datastore.RunAggregationQueryResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_run_aggregation_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_run_aggregation_query_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = datastore.RunAggregationQueryResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore_v1.DatastoreClient.run_aggregation_query", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "RunAggregationQuery", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _RunQuery(_BaseDatastoreRestTransport._BaseRunQuery, DatastoreRestStub): @@ -1210,7 +1827,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.RunQueryResponse: r"""Call the run query method over HTTP. @@ -1221,8 +1838,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.datastore.RunQueryResponse: @@ -1232,6 +1851,7 @@ def __call__( """ http_options = _BaseDatastoreRestTransport._BaseRunQuery._get_http_options() + request, metadata = self._interceptor.pre_run_query(request, metadata) transcoded_request = ( _BaseDatastoreRestTransport._BaseRunQuery._get_transcoded_request( @@ -1250,6 +1870,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore_v1.DatastoreClient.RunQuery", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "RunQuery", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreRestTransport._RunQuery._get_response( self._host, @@ -1271,7 +1918,33 @@ def __call__( pb_resp = datastore.RunQueryResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_run_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_run_query_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = datastore.RunQueryResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore_v1.DatastoreClient.run_query", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "RunQuery", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -1376,7 +2049,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Call the cancel operation method over HTTP. @@ -1386,13 +2059,16 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" http_options = ( _BaseDatastoreRestTransport._BaseCancelOperation._get_http_options() ) + request, metadata = self._interceptor.pre_cancel_operation( request, metadata ) @@ -1407,6 +2083,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore_v1.DatastoreClient.CancelOperation", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreRestTransport._CancelOperation._get_response( self._host, @@ -1462,7 +2165,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Call the delete operation method over HTTP. @@ -1472,13 +2175,16 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ http_options = ( _BaseDatastoreRestTransport._BaseDeleteOperation._get_http_options() ) + request, metadata = self._interceptor.pre_delete_operation( request, metadata ) @@ -1493,6 +2199,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore_v1.DatastoreClient.DeleteOperation", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreRestTransport._DeleteOperation._get_response( self._host, @@ -1548,7 +2281,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. @@ -1558,8 +2291,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.Operation: Response from GetOperation method. @@ -1568,6 +2303,7 @@ def __call__( http_options = ( _BaseDatastoreRestTransport._BaseGetOperation._get_http_options() ) + request, metadata = self._interceptor.pre_get_operation(request, metadata) transcoded_request = ( _BaseDatastoreRestTransport._BaseGetOperation._get_transcoded_request( @@ -1582,6 +2318,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore_v1.DatastoreClient.GetOperation", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreRestTransport._GetOperation._get_response( self._host, @@ -1601,6 +2364,27 @@ def __call__( resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore_v1.DatastoreAsyncClient.GetOperation", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property @@ -1641,7 +2425,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. @@ -1651,8 +2435,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.ListOperationsResponse: Response from ListOperations method. 
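Note on the widened `metadata` annotation above: callers may now mix text and binary values in per-call metadata, with `bytes` reserved for keys carrying the `-bin` suffix. A minimal sketch of what that looks like from the caller's side; the two key names are made up for illustration and are not real Datastore headers:

```python
from google.cloud import datastore_v1

client = datastore_v1.DatastoreClient()

# "x-example-token" and "x-example-token-bin" are hypothetical keys.
# Per the docstring above, ordinary keys take str values and keys
# ending in "-bin" take bytes values.
response = client.begin_transaction(
    project_id="my-project",
    metadata=(
        ("x-example-token", "plain-text-value"),   # str for an ordinary key
        ("x-example-token-bin", b"\x01\x02\x03"),  # bytes for a "-bin" key
    ),
)
```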
@@ -1661,6 +2447,7 @@ def __call__( http_options = ( _BaseDatastoreRestTransport._BaseListOperations._get_http_options() ) + request, metadata = self._interceptor.pre_list_operations(request, metadata) transcoded_request = ( _BaseDatastoreRestTransport._BaseListOperations._get_transcoded_request( @@ -1675,6 +2462,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore_v1.DatastoreClient.ListOperations", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreRestTransport._ListOperations._get_response( self._host, @@ -1694,6 +2508,27 @@ def __call__( resp = operations_pb2.ListOperationsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore_v1.DatastoreAsyncClient.ListOperations", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property diff --git a/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index f880423c..f05f7e4c 100644 --- a/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -73,6 +73,14 @@ import google.auth +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER chunk = data[i : i + chunk_size] @@ -331,83 +339,46 @@ def test__get_universe_domain(): @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "error_code,cred_info_json,show_cred_info", [ - (DatastoreAdminClient, transports.DatastoreAdminGrpcTransport, "grpc"), - (DatastoreAdminClient, transports.DatastoreAdminRestTransport, "rest"), + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), ], ) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + 
cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DatastoreAdminClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DatastoreAdminClient(credentials=cred) + client._transport._credentials = cred - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). 
If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) + client._add_cred_info_for_auth_errors(error) + assert error.details == [] @pytest.mark.parametrize( @@ -3171,6 +3142,7 @@ def test_export_entities_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.export_entities(request) @@ -3225,6 +3197,7 @@ def test_export_entities_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.export_entities(**mock_args) @@ -3363,6 +3336,7 @@ def test_import_entities_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.import_entities(request) @@ -3417,6 +3391,7 @@ def test_import_entities_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.import_entities(**mock_args) @@ -4094,6 +4069,7 @@ def test_export_entities_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.export_entities(request) @@ -4124,6 +4100,7 @@ def test_export_entities_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.export_entities(request) # Establish that the response is the type that we expect. 
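The new `test__add_cred_info_for_auth_errors` cases above pin down a contract: credential debugging info is appended to an error's `details` only for the auth-shaped status codes (401, 403, 404) and only when the credentials object exposes `get_cred_info()` returning something truthy. A sketch of logic consistent with those assertions, not the shipped client implementation:

```python
import json

_AUTH_ERROR_CODES = (401, 403, 404)  # codes the tests expect to trigger enrichment

def _add_cred_info_for_auth_errors(error, credentials):
    """Mirror of the tested contract: enrich auth errors with cred info."""
    get_cred_info = getattr(credentials, "get_cred_info", None)
    if get_cred_info is None:
        return  # credentials without get_cred_info() leave details untouched
    cred_info = get_cred_info()
    if cred_info and error.code in _AUTH_ERROR_CODES:
        error.details.append(json.dumps(cred_info))
```

This reproduces both parametrized tests: a 500, a `None` cred info payload, or credentials lacking the hook all leave `error.details` unchanged.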
@@ -4149,10 +4126,13 @@ def test_export_entities_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastoreAdminRestInterceptor, "post_export_entities" ) as post, mock.patch.object( + transports.DatastoreAdminRestInterceptor, "post_export_entities_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreAdminRestInterceptor, "pre_export_entities" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore_admin.ExportEntitiesRequest.pb( datastore_admin.ExportEntitiesRequest() ) @@ -4165,6 +4145,7 @@ def test_export_entities_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -4175,6 +4156,7 @@ def test_export_entities_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_entities( request, @@ -4186,6 +4168,7 @@ def test_export_entities_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_entities_rest_bad_request( @@ -4209,6 +4192,7 @@ def test_import_entities_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.import_entities(request) @@ -4239,6 +4223,7 @@ def test_import_entities_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.import_entities(request) # Establish that the response is the type that we expect. 
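These interceptor tests exercise the new `post_*_with_metadata` hooks, which receive the response together with the response metadata and must return the pair. A hedged sketch of a user-defined interceptor built on that contract; the import path and logger name are assumptions for illustration:

```python
import logging

from google.cloud.datastore_admin_v1.services.datastore_admin import transports

_log = logging.getLogger("example.datastore_admin.interceptor")  # illustrative name


class HeaderCapturingInterceptor(transports.DatastoreAdminRestInterceptor):
    def post_export_entities_with_metadata(self, response, metadata):
        # `metadata` carries the (key, value) response-header pairs the
        # transport collected; the hook hands back the same pair,
        # optionally modified.
        _log.debug("ExportEntities response headers: %r", dict(metadata))
        return response, metadata
```

An interceptor instance is normally supplied to the REST transport at construction time; the tests above patch the hook methods directly instead.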
@@ -4264,10 +4249,13 @@ def test_import_entities_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastoreAdminRestInterceptor, "post_import_entities" ) as post, mock.patch.object( + transports.DatastoreAdminRestInterceptor, "post_import_entities_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreAdminRestInterceptor, "pre_import_entities" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore_admin.ImportEntitiesRequest.pb( datastore_admin.ImportEntitiesRequest() ) @@ -4280,6 +4268,7 @@ def test_import_entities_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -4290,6 +4279,7 @@ def test_import_entities_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_entities( request, @@ -4301,6 +4291,7 @@ def test_import_entities_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_index_rest_bad_request(request_type=datastore_admin.CreateIndexRequest): @@ -4322,6 +4313,7 @@ def test_create_index_rest_bad_request(request_type=datastore_admin.CreateIndexR response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_index(request) @@ -4427,6 +4419,7 @@ def get_message_fields(field): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_index(request) # Establish that the response is the type that we expect. 
@@ -4452,10 +4445,13 @@ def test_create_index_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastoreAdminRestInterceptor, "post_create_index" ) as post, mock.patch.object( + transports.DatastoreAdminRestInterceptor, "post_create_index_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreAdminRestInterceptor, "pre_create_index" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore_admin.CreateIndexRequest.pb( datastore_admin.CreateIndexRequest() ) @@ -4468,6 +4464,7 @@ def test_create_index_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -4478,6 +4475,7 @@ def test_create_index_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_index( request, @@ -4489,6 +4487,7 @@ def test_create_index_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_index_rest_bad_request(request_type=datastore_admin.DeleteIndexRequest): @@ -4510,6 +4509,7 @@ def test_delete_index_rest_bad_request(request_type=datastore_admin.DeleteIndexR response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_index(request) @@ -4540,6 +4540,7 @@ def test_delete_index_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_index(request) # Establish that the response is the type that we expect. 
@@ -4565,10 +4566,13 @@ def test_delete_index_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastoreAdminRestInterceptor, "post_delete_index" ) as post, mock.patch.object( + transports.DatastoreAdminRestInterceptor, "post_delete_index_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreAdminRestInterceptor, "pre_delete_index" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore_admin.DeleteIndexRequest.pb( datastore_admin.DeleteIndexRequest() ) @@ -4581,6 +4585,7 @@ def test_delete_index_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -4591,6 +4596,7 @@ def test_delete_index_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_index( request, @@ -4602,6 +4608,7 @@ def test_delete_index_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_index_rest_bad_request(request_type=datastore_admin.GetIndexRequest): @@ -4623,6 +4630,7 @@ def test_get_index_rest_bad_request(request_type=datastore_admin.GetIndexRequest response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_index(request) @@ -4662,6 +4670,7 @@ def test_get_index_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_index(request) # Establish that the response is the type that we expect. 
@@ -4690,10 +4699,13 @@ def test_get_index_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastoreAdminRestInterceptor, "post_get_index" ) as post, mock.patch.object( + transports.DatastoreAdminRestInterceptor, "post_get_index_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreAdminRestInterceptor, "pre_get_index" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore_admin.GetIndexRequest.pb( datastore_admin.GetIndexRequest() ) @@ -4706,6 +4718,7 @@ def test_get_index_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = index.Index.to_json(index.Index()) req.return_value.content = return_value @@ -4716,6 +4729,7 @@ def test_get_index_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = index.Index() + post_with_metadata.return_value = index.Index(), metadata client.get_index( request, @@ -4727,6 +4741,7 @@ def test_get_index_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_indexes_rest_bad_request(request_type=datastore_admin.ListIndexesRequest): @@ -4748,6 +4763,7 @@ def test_list_indexes_rest_bad_request(request_type=datastore_admin.ListIndexesR response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_indexes(request) @@ -4783,6 +4799,7 @@ def test_list_indexes_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_indexes(request) # Establish that the response is the type that we expect. 
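The recurring `req.return_value.headers = {...}` additions in these tests follow from the transport change: the REST layer now reads `response.headers` to build the metadata passed to the `post_*_with_metadata` hooks, so a bare `mock.Mock()` response would fail. The minimal mocked-response shape the tests now rely on, with illustrative values:

```python
from unittest import mock

# After this change a mocked REST response needs headers in addition to
# status_code and content, because the transport collects them as
# response metadata.
response_value = mock.Mock()
response_value.status_code = 200
response_value.content = b"{}"
response_value.headers = {"header-1": "value-1", "header-2": "value-2"}
```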
@@ -4807,10 +4824,13 @@ def test_list_indexes_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastoreAdminRestInterceptor, "post_list_indexes" ) as post, mock.patch.object( + transports.DatastoreAdminRestInterceptor, "post_list_indexes_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreAdminRestInterceptor, "pre_list_indexes" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore_admin.ListIndexesRequest.pb( datastore_admin.ListIndexesRequest() ) @@ -4823,6 +4843,7 @@ def test_list_indexes_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = datastore_admin.ListIndexesResponse.to_json( datastore_admin.ListIndexesResponse() ) @@ -4835,6 +4856,10 @@ def test_list_indexes_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastore_admin.ListIndexesResponse() + post_with_metadata.return_value = ( + datastore_admin.ListIndexesResponse(), + metadata, + ) client.list_indexes( request, @@ -4846,6 +4871,7 @@ def test_list_indexes_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( @@ -4871,6 +4897,7 @@ def test_cancel_operation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.cancel_operation(request) @@ -4901,6 +4928,7 @@ def test_cancel_operation_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.cancel_operation(request) @@ -4931,6 +4959,7 @@ def test_delete_operation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_operation(request) @@ -4961,6 +4990,7 @@ def test_delete_operation_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_operation(request) @@ -4991,6 +5021,7 @@ def test_get_operation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_operation(request) @@ -5021,6 +5052,7 @@ def test_get_operation_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_operation(request) @@ -5049,6 +5081,7 @@ def test_list_operations_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_operations(request) @@ -5079,6 +5112,7 @@ def test_list_operations_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers 
= {"header-1": "value-1", "header-2": "value-2"} response = client.list_operations(request) diff --git a/tests/unit/gapic/datastore_v1/test_datastore.py b/tests/unit/gapic/datastore_v1/test_datastore.py index 8a28ba74..7f0755a8 100644 --- a/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/tests/unit/gapic/datastore_v1/test_datastore.py @@ -70,6 +70,14 @@ import google.auth +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER chunk = data[i : i + chunk_size] @@ -303,83 +311,46 @@ def test__get_universe_domain(): @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "error_code,cred_info_json,show_cred_info", [ - (DatastoreClient, transports.DatastoreGrpcTransport, "grpc"), - (DatastoreClient, transports.DatastoreRestTransport, "rest"), + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), ], ) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DatastoreClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DatastoreClient(credentials=cred) + client._transport._credentials = cred - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) + client._add_cred_info_for_auth_errors(error) + assert error.details == [] @pytest.mark.parametrize( @@ -3357,6 +3328,7 @@ def test_lookup_rest_required_fields(request_type=datastore.LookupRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.lookup(request) @@ -3418,6 +3390,7 @@ def test_lookup_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.lookup(**mock_args) @@ -3554,6 +3527,7 @@ def test_run_query_rest_required_fields(request_type=datastore.RunQueryRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.run_query(request) @@ -3679,6 +3653,7 @@ def test_run_aggregation_query_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.run_aggregation_query(request) @@ -3801,6 +3776,7 @@ def test_begin_transaction_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.begin_transaction(request) @@ -3846,6 +3822,7 @@ def test_begin_transaction_rest_flattened(): json_return_value = 
json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.begin_transaction(**mock_args) @@ -3975,6 +3952,7 @@ def test_commit_rest_required_fields(request_type=datastore.CommitRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.commit(request) @@ -4032,6 +4010,7 @@ def test_commit_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.commit(**mock_args) @@ -4177,6 +4156,7 @@ def test_rollback_rest_required_fields(request_type=datastore.RollbackRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.rollback(request) @@ -4231,6 +4211,7 @@ def test_rollback_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.rollback(**mock_args) @@ -4360,6 +4341,7 @@ def test_allocate_ids_rest_required_fields(request_type=datastore.AllocateIdsReq response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.allocate_ids(request) @@ -4418,6 +4400,7 @@ def test_allocate_ids_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.allocate_ids(**mock_args) @@ -4551,6 +4534,7 @@ def test_reserve_ids_rest_required_fields(request_type=datastore.ReserveIdsReque response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.reserve_ids(request) @@ -4609,6 +4593,7 @@ def test_reserve_ids_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.reserve_ids(**mock_args) @@ -6041,6 +6026,7 @@ def test_lookup_rest_bad_request(request_type=datastore.LookupRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.lookup(request) @@ -6076,6 +6062,7 @@ def test_lookup_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.lookup(request) # Establish that the response is the type that we expect. 
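The `_LOGGER.debug(...)` calls added throughout the transports only fire when `CLIENT_LOGGING_SUPPORTED` is true and the library's logger is enabled for DEBUG. Two ways to surface them; the environment variable comes from google-api-core's client logging support, and treating `"google"` as the ancestor logger name is an assumption:

```python
import logging
import os

# Option 1 (assumed behavior of google-api-core client logging): set the
# scope before the client library is imported.
os.environ["GOOGLE_SDK_PYTHON_LOGGING_SCOPE"] = "google"

# Option 2: plain stdlib configuration.
logging.basicConfig()
logging.getLogger("google").setLevel(logging.DEBUG)
```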
@@ -6098,10 +6085,13 @@ def test_lookup_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastoreRestInterceptor, "post_lookup" ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "post_lookup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreRestInterceptor, "pre_lookup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore.LookupRequest.pb(datastore.LookupRequest()) transcode.return_value = { "method": "post", @@ -6112,6 +6102,7 @@ def test_lookup_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = datastore.LookupResponse.to_json(datastore.LookupResponse()) req.return_value.content = return_value @@ -6122,6 +6113,7 @@ def test_lookup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastore.LookupResponse() + post_with_metadata.return_value = datastore.LookupResponse(), metadata client.lookup( request, @@ -6133,6 +6125,7 @@ def test_lookup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_run_query_rest_bad_request(request_type=datastore.RunQueryRequest): @@ -6154,6 +6147,7 @@ def test_run_query_rest_bad_request(request_type=datastore.RunQueryRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.run_query(request) @@ -6189,6 +6183,7 @@ def test_run_query_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.run_query(request) # Establish that the response is the type that we expect. 
@@ -6211,10 +6206,13 @@ def test_run_query_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastoreRestInterceptor, "post_run_query" ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "post_run_query_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreRestInterceptor, "pre_run_query" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore.RunQueryRequest.pb(datastore.RunQueryRequest()) transcode.return_value = { "method": "post", @@ -6225,6 +6223,7 @@ def test_run_query_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = datastore.RunQueryResponse.to_json(datastore.RunQueryResponse()) req.return_value.content = return_value @@ -6235,6 +6234,7 @@ def test_run_query_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastore.RunQueryResponse() + post_with_metadata.return_value = datastore.RunQueryResponse(), metadata client.run_query( request, @@ -6246,6 +6246,7 @@ def test_run_query_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_run_aggregation_query_rest_bad_request( @@ -6269,6 +6270,7 @@ def test_run_aggregation_query_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.run_aggregation_query(request) @@ -6304,6 +6306,7 @@ def test_run_aggregation_query_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.run_aggregation_query(request) # Establish that the response is the type that we expect. 
@@ -6326,10 +6329,13 @@ def test_run_aggregation_query_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastoreRestInterceptor, "post_run_aggregation_query" ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "post_run_aggregation_query_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreRestInterceptor, "pre_run_aggregation_query" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore.RunAggregationQueryRequest.pb( datastore.RunAggregationQueryRequest() ) @@ -6342,6 +6348,7 @@ def test_run_aggregation_query_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = datastore.RunAggregationQueryResponse.to_json( datastore.RunAggregationQueryResponse() ) @@ -6354,6 +6361,10 @@ def test_run_aggregation_query_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastore.RunAggregationQueryResponse() + post_with_metadata.return_value = ( + datastore.RunAggregationQueryResponse(), + metadata, + ) client.run_aggregation_query( request, @@ -6365,6 +6376,7 @@ def test_run_aggregation_query_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_begin_transaction_rest_bad_request( @@ -6388,6 +6400,7 @@ def test_begin_transaction_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.begin_transaction(request) @@ -6423,6 +6436,7 @@ def test_begin_transaction_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.begin_transaction(request) # Establish that the response is the type that we expect. 
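The debug entries emitted above attach structured fields (`serviceName`, `rpcName`, `httpRequest`, `httpResponse`) via the `extra` argument, which the stdlib logging module exposes as attributes on each `LogRecord`. A purely illustrative formatter that surfaces those fields on the handler side:

```python
import json
import logging


class RpcDebugFormatter(logging.Formatter):
    """Appends the transport's structured `extra` fields when present."""

    _FIELDS = ("serviceName", "rpcName", "httpRequest", "httpResponse")

    def format(self, record):
        message = super().format(record)
        extras = {f: getattr(record, f) for f in self._FIELDS if hasattr(record, f)}
        if extras:
            message += " " + json.dumps(extras, default=str)
        return message
```

Attach it with `handler.setFormatter(RpcDebugFormatter())` on whichever handler receives the client's DEBUG records.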
@@ -6445,10 +6459,13 @@ def test_begin_transaction_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastoreRestInterceptor, "post_begin_transaction" ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "post_begin_transaction_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreRestInterceptor, "pre_begin_transaction" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore.BeginTransactionRequest.pb( datastore.BeginTransactionRequest() ) @@ -6461,6 +6478,7 @@ def test_begin_transaction_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = datastore.BeginTransactionResponse.to_json( datastore.BeginTransactionResponse() ) @@ -6473,6 +6491,7 @@ def test_begin_transaction_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastore.BeginTransactionResponse() + post_with_metadata.return_value = datastore.BeginTransactionResponse(), metadata client.begin_transaction( request, @@ -6484,6 +6503,7 @@ def test_begin_transaction_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_commit_rest_bad_request(request_type=datastore.CommitRequest): @@ -6505,6 +6525,7 @@ def test_commit_rest_bad_request(request_type=datastore.CommitRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.commit(request) @@ -6540,6 +6561,7 @@ def test_commit_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.commit(request) # Establish that the response is the type that we expect. 
@@ -6562,10 +6584,13 @@ def test_commit_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastoreRestInterceptor, "post_commit" ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "post_commit_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreRestInterceptor, "pre_commit" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore.CommitRequest.pb(datastore.CommitRequest()) transcode.return_value = { "method": "post", @@ -6576,6 +6601,7 @@ def test_commit_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = datastore.CommitResponse.to_json(datastore.CommitResponse()) req.return_value.content = return_value @@ -6586,6 +6612,7 @@ def test_commit_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastore.CommitResponse() + post_with_metadata.return_value = datastore.CommitResponse(), metadata client.commit( request, @@ -6597,6 +6624,7 @@ def test_commit_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_rollback_rest_bad_request(request_type=datastore.RollbackRequest): @@ -6618,6 +6646,7 @@ def test_rollback_rest_bad_request(request_type=datastore.RollbackRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.rollback(request) @@ -6651,6 +6680,7 @@ def test_rollback_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.rollback(request) # Establish that the response is the type that we expect. 
@@ -6672,10 +6702,13 @@ def test_rollback_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastoreRestInterceptor, "post_rollback" ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "post_rollback_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreRestInterceptor, "pre_rollback" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore.RollbackRequest.pb(datastore.RollbackRequest()) transcode.return_value = { "method": "post", @@ -6686,6 +6719,7 @@ def test_rollback_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = datastore.RollbackResponse.to_json(datastore.RollbackResponse()) req.return_value.content = return_value @@ -6696,6 +6730,7 @@ def test_rollback_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastore.RollbackResponse() + post_with_metadata.return_value = datastore.RollbackResponse(), metadata client.rollback( request, @@ -6707,6 +6742,7 @@ def test_rollback_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_allocate_ids_rest_bad_request(request_type=datastore.AllocateIdsRequest): @@ -6728,6 +6764,7 @@ def test_allocate_ids_rest_bad_request(request_type=datastore.AllocateIdsRequest response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.allocate_ids(request) @@ -6761,6 +6798,7 @@ def test_allocate_ids_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.allocate_ids(request) # Establish that the response is the type that we expect. 
@@ -6782,10 +6820,13 @@ def test_allocate_ids_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastoreRestInterceptor, "post_allocate_ids" ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "post_allocate_ids_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreRestInterceptor, "pre_allocate_ids" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore.AllocateIdsRequest.pb(datastore.AllocateIdsRequest()) transcode.return_value = { "method": "post", @@ -6796,6 +6837,7 @@ def test_allocate_ids_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = datastore.AllocateIdsResponse.to_json( datastore.AllocateIdsResponse() ) @@ -6808,6 +6850,7 @@ def test_allocate_ids_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastore.AllocateIdsResponse() + post_with_metadata.return_value = datastore.AllocateIdsResponse(), metadata client.allocate_ids( request, @@ -6819,6 +6862,7 @@ def test_allocate_ids_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_reserve_ids_rest_bad_request(request_type=datastore.ReserveIdsRequest): @@ -6840,6 +6884,7 @@ def test_reserve_ids_rest_bad_request(request_type=datastore.ReserveIdsRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.reserve_ids(request) @@ -6873,6 +6918,7 @@ def test_reserve_ids_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.reserve_ids(request) # Establish that the response is the type that we expect. 
@@ -6894,10 +6940,13 @@ def test_reserve_ids_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastoreRestInterceptor, "post_reserve_ids" ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "post_reserve_ids_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreRestInterceptor, "pre_reserve_ids" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore.ReserveIdsRequest.pb(datastore.ReserveIdsRequest()) transcode.return_value = { "method": "post", @@ -6908,6 +6957,7 @@ def test_reserve_ids_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = datastore.ReserveIdsResponse.to_json( datastore.ReserveIdsResponse() ) @@ -6920,6 +6970,7 @@ def test_reserve_ids_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastore.ReserveIdsResponse() + post_with_metadata.return_value = datastore.ReserveIdsResponse(), metadata client.reserve_ids( request, @@ -6931,6 +6982,7 @@ def test_reserve_ids_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( @@ -6956,6 +7008,7 @@ def test_cancel_operation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.cancel_operation(request) @@ -6986,6 +7039,7 @@ def test_cancel_operation_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.cancel_operation(request) @@ -7016,6 +7070,7 @@ def test_delete_operation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_operation(request) @@ -7046,6 +7101,7 @@ def test_delete_operation_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_operation(request) @@ -7076,6 +7132,7 @@ def test_get_operation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_operation(request) @@ -7106,6 +7163,7 @@ def test_get_operation_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_operation(request) @@ -7134,6 +7192,7 @@ def test_list_operations_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_operations(request) @@ -7164,6 +7223,7 @@ def test_list_operations_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", 
"header-2": "value-2"} response = client.list_operations(request) From 7c1171bf657f7cf4d1404e19611f6c874a8998ca Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 17 Mar 2025 11:07:42 -0400 Subject: [PATCH 05/10] fix: Allow protobuf 6.x (#598) --- setup.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/setup.py b/setup.py index 0534aa7c..cc91b069 100644 --- a/setup.py +++ b/setup.py @@ -29,15 +29,15 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-api-core[grpc] >= 1.34.0, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", # Exclude incompatible versions of `google-auth` # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "google-cloud-core >= 1.4.0, <3.0.0dev", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "proto-plus >= 1.25.0, <2.0.0dev; python_version>='3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "google-cloud-core >= 1.4.0, <3.0.0", + "proto-plus >= 1.22.0, <2.0.0", + "proto-plus >= 1.22.2, <2.0.0; python_version>='3.11'", + "proto-plus >= 1.25.0, <2.0.0; python_version>='3.13'", + "protobuf>=3.20.2,<7.0.0,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] extras = {"libcst": "libcst >= 0.2.5"} From a7df08060de6d729ef125efa12126dc32ea85006 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 19 Mar 2025 10:51:59 -0400 Subject: [PATCH 06/10] chore: Update gapic-generator-python to 1.23.6 (#602) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to 1.23.6 PiperOrigin-RevId: 738170370 Source-Link: https://github.com/googleapis/googleapis/commit/3f1e17aa2dec3f146a9a2a8a64c5c6d19d0b6e15 Source-Link: https://github.com/googleapis/googleapis-gen/commit/9afd8c33d4cae610b75fa4999264ea8c8c66b9d2 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOWFmZDhjMzNkNGNhZTYxMGI3NWZhNDk5OTI2NGVhOGM4YzY2YjlkMiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- google/cloud/datastore_admin/__init__.py | 2 +- google/cloud/datastore_admin_v1/__init__.py | 2 +- google/cloud/datastore_admin_v1/services/__init__.py | 2 +- .../datastore_admin_v1/services/datastore_admin/__init__.py | 2 +- .../datastore_admin_v1/services/datastore_admin/async_client.py | 2 +- .../cloud/datastore_admin_v1/services/datastore_admin/client.py | 2 +- .../cloud/datastore_admin_v1/services/datastore_admin/pagers.py | 2 +- .../services/datastore_admin/transports/__init__.py | 2 +- .../services/datastore_admin/transports/base.py | 2 +- .../services/datastore_admin/transports/grpc.py | 2 +- .../services/datastore_admin/transports/grpc_asyncio.py | 2 +- .../services/datastore_admin/transports/rest.py | 2 +- .../services/datastore_admin/transports/rest_base.py | 2 +- google/cloud/datastore_admin_v1/types/__init__.py | 2 +- google/cloud/datastore_admin_v1/types/datastore_admin.py | 2 +- 
google/cloud/datastore_admin_v1/types/index.py | 2 +- google/cloud/datastore_admin_v1/types/migration.py | 2 +- google/cloud/datastore_v1/__init__.py | 2 +- google/cloud/datastore_v1/services/__init__.py | 2 +- google/cloud/datastore_v1/services/datastore/__init__.py | 2 +- google/cloud/datastore_v1/services/datastore/async_client.py | 2 +- google/cloud/datastore_v1/services/datastore/client.py | 2 +- .../datastore_v1/services/datastore/transports/__init__.py | 2 +- google/cloud/datastore_v1/services/datastore/transports/base.py | 2 +- google/cloud/datastore_v1/services/datastore/transports/grpc.py | 2 +- .../datastore_v1/services/datastore/transports/grpc_asyncio.py | 2 +- google/cloud/datastore_v1/services/datastore/transports/rest.py | 2 +- .../datastore_v1/services/datastore/transports/rest_base.py | 2 +- google/cloud/datastore_v1/types/__init__.py | 2 +- google/cloud/datastore_v1/types/aggregation_result.py | 2 +- google/cloud/datastore_v1/types/datastore.py | 2 +- google/cloud/datastore_v1/types/entity.py | 2 +- google/cloud/datastore_v1/types/query.py | 2 +- google/cloud/datastore_v1/types/query_profile.py | 2 +- scripts/fixup_datastore_admin_v1_keywords.py | 2 +- scripts/fixup_datastore_v1_keywords.py | 2 +- tests/__init__.py | 2 +- tests/unit/__init__.py | 2 +- tests/unit/gapic/__init__.py | 2 +- tests/unit/gapic/datastore_admin_v1/__init__.py | 2 +- tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py | 2 +- tests/unit/gapic/datastore_v1/__init__.py | 2 +- tests/unit/gapic/datastore_v1/test_datastore.py | 2 +- 43 files changed, 43 insertions(+), 43 deletions(-) diff --git a/google/cloud/datastore_admin/__init__.py b/google/cloud/datastore_admin/__init__.py index 09b75aef..9009817c 100644 --- a/google/cloud/datastore_admin/__init__.py +++ b/google/cloud/datastore_admin/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_admin_v1/__init__.py b/google/cloud/datastore_admin_v1/__init__.py index 6d57bbb9..4f25734e 100644 --- a/google/cloud/datastore_admin_v1/__init__.py +++ b/google/cloud/datastore_admin_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_admin_v1/services/__init__.py b/google/cloud/datastore_admin_v1/services/__init__.py index 8f6cf068..cbf94b28 100644 --- a/google/cloud/datastore_admin_v1/services/__init__.py +++ b/google/cloud/datastore_admin_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py b/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py index be83caf7..011b1b84 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index 245f8fb1..fbb93fac 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index 489995bc..012344bf 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py b/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py index 4c0fa8a5..046862c5 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py index f3b7656e..a8b80cff 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index bcfb2688..89bb1a70 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index 49828797..4d5c7ce0 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py index f895032d..5ba9ac52 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py index 1cdfd4b9..82644569 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest_base.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest_base.py index a94eece5..b36139a6 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest_base.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest_base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_admin_v1/types/__init__.py b/google/cloud/datastore_admin_v1/types/__init__.py index ca082a05..ef6c33ec 100644 --- a/google/cloud/datastore_admin_v1/types/__init__.py +++ b/google/cloud/datastore_admin_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_admin_v1/types/datastore_admin.py b/google/cloud/datastore_admin_v1/types/datastore_admin.py index eb838570..f6251eb8 100644 --- a/google/cloud/datastore_admin_v1/types/datastore_admin.py +++ b/google/cloud/datastore_admin_v1/types/datastore_admin.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/datastore_admin_v1/types/index.py b/google/cloud/datastore_admin_v1/types/index.py index 77a7079d..ef74f896 100644 --- a/google/cloud/datastore_admin_v1/types/index.py +++ b/google/cloud/datastore_admin_v1/types/index.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_admin_v1/types/migration.py b/google/cloud/datastore_admin_v1/types/migration.py index ec69e941..68447560 100644 --- a/google/cloud/datastore_admin_v1/types/migration.py +++ b/google/cloud/datastore_admin_v1/types/migration.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_v1/__init__.py b/google/cloud/datastore_v1/__init__.py index a417fe1e..2f60c07f 100644 --- a/google/cloud/datastore_v1/__init__.py +++ b/google/cloud/datastore_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_v1/services/__init__.py b/google/cloud/datastore_v1/services/__init__.py index 8f6cf068..cbf94b28 100644 --- a/google/cloud/datastore_v1/services/__init__.py +++ b/google/cloud/datastore_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_v1/services/datastore/__init__.py b/google/cloud/datastore_v1/services/datastore/__init__.py index e992abb3..271de072 100644 --- a/google/cloud/datastore_v1/services/datastore/__init__.py +++ b/google/cloud/datastore_v1/services/datastore/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_v1/services/datastore/async_client.py b/google/cloud/datastore_v1/services/datastore/async_client.py index 88de0d08..97002490 100644 --- a/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/google/cloud/datastore_v1/services/datastore/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_v1/services/datastore/client.py b/google/cloud/datastore_v1/services/datastore/client.py index aad9d820..34427229 100644 --- a/google/cloud/datastore_v1/services/datastore/client.py +++ b/google/cloud/datastore_v1/services/datastore/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/datastore_v1/services/datastore/transports/__init__.py b/google/cloud/datastore_v1/services/datastore/transports/__init__.py index 727e271c..719fb699 100644 --- a/google/cloud/datastore_v1/services/datastore/transports/__init__.py +++ b/google/cloud/datastore_v1/services/datastore/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_v1/services/datastore/transports/base.py b/google/cloud/datastore_v1/services/datastore/transports/base.py index cb18d369..6070ee14 100644 --- a/google/cloud/datastore_v1/services/datastore/transports/base.py +++ b/google/cloud/datastore_v1/services/datastore/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/google/cloud/datastore_v1/services/datastore/transports/grpc.py index 6306c219..19592726 100644 --- a/google/cloud/datastore_v1/services/datastore/transports/grpc.py +++ b/google/cloud/datastore_v1/services/datastore/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py b/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py index aba046cb..e6749231 100644 --- a/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py +++ b/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_v1/services/datastore/transports/rest.py b/google/cloud/datastore_v1/services/datastore/transports/rest.py index 5bd89407..274a4a60 100644 --- a/google/cloud/datastore_v1/services/datastore/transports/rest.py +++ b/google/cloud/datastore_v1/services/datastore/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_v1/services/datastore/transports/rest_base.py b/google/cloud/datastore_v1/services/datastore/transports/rest_base.py index c8d5c675..437886be 100644 --- a/google/cloud/datastore_v1/services/datastore/transports/rest_base.py +++ b/google/cloud/datastore_v1/services/datastore/transports/rest_base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/datastore_v1/types/__init__.py b/google/cloud/datastore_v1/types/__init__.py index 0efe33ff..ea54d541 100644 --- a/google/cloud/datastore_v1/types/__init__.py +++ b/google/cloud/datastore_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_v1/types/aggregation_result.py b/google/cloud/datastore_v1/types/aggregation_result.py index b35ca1f9..2dbfb36a 100644 --- a/google/cloud/datastore_v1/types/aggregation_result.py +++ b/google/cloud/datastore_v1/types/aggregation_result.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_v1/types/datastore.py b/google/cloud/datastore_v1/types/datastore.py index 281866f5..6f74989b 100644 --- a/google/cloud/datastore_v1/types/datastore.py +++ b/google/cloud/datastore_v1/types/datastore.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_v1/types/entity.py b/google/cloud/datastore_v1/types/entity.py index 5c5bcdc4..7a3320ce 100644 --- a/google/cloud/datastore_v1/types/entity.py +++ b/google/cloud/datastore_v1/types/entity.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_v1/types/query.py b/google/cloud/datastore_v1/types/query.py index 1f8679cc..7f2da72b 100644 --- a/google/cloud/datastore_v1/types/query.py +++ b/google/cloud/datastore_v1/types/query.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_v1/types/query_profile.py b/google/cloud/datastore_v1/types/query_profile.py index 8dca0f6e..bf43fd62 100644 --- a/google/cloud/datastore_v1/types/query_profile.py +++ b/google/cloud/datastore_v1/types/query_profile.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/scripts/fixup_datastore_admin_v1_keywords.py b/scripts/fixup_datastore_admin_v1_keywords.py index 2f999e1e..409cafd3 100644 --- a/scripts/fixup_datastore_admin_v1_keywords.py +++ b/scripts/fixup_datastore_admin_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/scripts/fixup_datastore_v1_keywords.py b/scripts/fixup_datastore_v1_keywords.py index 661d509b..21c85f83 100644 --- a/scripts/fixup_datastore_v1_keywords.py +++ b/scripts/fixup_datastore_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/__init__.py b/tests/__init__.py index 8f6cf068..cbf94b28 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py index 8f6cf068..cbf94b28 100644 --- a/tests/unit/__init__.py +++ b/tests/unit/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py index 8f6cf068..cbf94b28 100644 --- a/tests/unit/gapic/__init__.py +++ b/tests/unit/gapic/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/datastore_admin_v1/__init__.py b/tests/unit/gapic/datastore_admin_v1/__init__.py index 8f6cf068..cbf94b28 100644 --- a/tests/unit/gapic/datastore_admin_v1/__init__.py +++ b/tests/unit/gapic/datastore_admin_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index f05f7e4c..540daec2 100644 --- a/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/datastore_v1/__init__.py b/tests/unit/gapic/datastore_v1/__init__.py index 8f6cf068..cbf94b28 100644 --- a/tests/unit/gapic/datastore_v1/__init__.py +++ b/tests/unit/gapic/datastore_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/tests/unit/gapic/datastore_v1/test_datastore.py b/tests/unit/gapic/datastore_v1/test_datastore.py index 7f0755a8..03320012 100644 --- a/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/tests/unit/gapic/datastore_v1/test_datastore.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From ed92e8e54a9e0f44302efee89a30a322d0a73636 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 24 Mar 2025 13:54:39 -0700 Subject: [PATCH 07/10] fix: backwards-compatibility for previous meaning format (#603) --- google/cloud/datastore/helpers.py | 11 ++++++- tests/unit/test_helpers.py | 55 +++++++++++++++++++++++++++++++ 2 files changed, 65 insertions(+), 1 deletion(-) diff --git a/google/cloud/datastore/helpers.py b/google/cloud/datastore/helpers.py index d491360c..cdce291c 100644 --- a/google/cloud/datastore/helpers.py +++ b/google/cloud/datastore/helpers.py @@ -18,6 +18,7 @@ """ import datetime +import itertools from google.protobuf import struct_pb2 from google.type import latlng_pb2 @@ -182,7 +183,15 @@ def _set_pb_meaning_from_entity(entity, name, value, value_pb, is_list=False): return elif is_list: # for lists, set meaning on the root pb and on each sub-element - root_meaning, sub_meaning_list = meaning + if isinstance(meaning, tuple): + root_meaning, sub_meaning_list = meaning + else: + # if meaning isn't a tuple, fall back to pre-v2.20.2 meaning format + root_meaning = None + if isinstance(meaning, list): + sub_meaning_list = meaning + else: + sub_meaning_list = itertools.repeat(meaning) if root_meaning is not None: value_pb.meaning = root_meaning if sub_meaning_list: diff --git a/tests/unit/test_helpers.py b/tests/unit/test_helpers.py index a6f63a80..710a6cb7 100644 --- a/tests/unit/test_helpers.py +++ b/tests/unit/test_helpers.py @@ -1318,6 +1318,61 @@ def test__set_pb_meaning_w_value_unset(orig_meaning): assert value_pb.meaning == orig_meaning +def test__set_pb_meaning_w_list_and_single_value(): + """ + v2.20.2 uses a tuple to represent list meanings (https://github.com/googleapis/python-datastore/pull/575) + + This check ensures _set_pb_meaning_from_entity is backwards + compatible with the old meaning style, still used by python-ndb + """ + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import _set_pb_meaning_from_entity + from google.cloud.datastore.entity import Entity + + orig_root_meaning = 1 + updated_meaning = 22 + orig_pb = entity_pb2.Entity() + value_pb = orig_pb._pb.properties.get_or_create("value") + value_pb.meaning = orig_root_meaning + sub_value_pb1 = value_pb.array_value.values.add() + sub_value_pb2 = value_pb.array_value.values.add() + + entity = Entity(key="key") + entity._meanings = {"value": (updated_meaning, None)} + _set_pb_meaning_from_entity(entity, "value", None, value_pb, is_list=True) + assert value_pb.meaning == orig_root_meaning + assert sub_value_pb1.meaning == updated_meaning + assert sub_value_pb2.meaning == updated_meaning + + +def test__set_pb_meaning_w_list_and_list(): + """ + v2.20.2 uses a tuple to represent list meanings (https://github.com/googleapis/python-datastore/pull/575) + + This check ensures _set_pb_meaning_from_entity is backwards + compatible with the old meaning style, still used by python-ndb + """ + from google.cloud.datastore_v1.types import entity as entity_pb2 + from 
google.cloud.datastore.helpers import _set_pb_meaning_from_entity + from google.cloud.datastore.entity import Entity + + orig_root_meaning = 1 + updated_meaning_1 = 12 + updated_meaning_2 = 4 + orig_pb = entity_pb2.Entity() + value_pb = orig_pb._pb.properties.get_or_create("value") + value_pb.meaning = orig_root_meaning + sub_value_pb1 = value_pb.array_value.values.add() + sub_value_pb2 = value_pb.array_value.values.add() + + entity = Entity(key="key") + entity._meanings = {"value": ([updated_meaning_1, updated_meaning_2], None)} + _set_pb_meaning_from_entity(entity, "value", None, value_pb, is_list=True) + assert value_pb.meaning == orig_root_meaning + assert sub_value_pb1.meaning == updated_meaning_1 + assert sub_value_pb2.meaning == updated_meaning_2 + + def test__array_w_meaning_end_to_end(): """ Test proto->entity->proto with an array with a meaning field From 16b9c737abfda1f98a871b4080528ac8f61f9abc Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 10 Apr 2025 12:11:23 -0400 Subject: [PATCH 08/10] chore(python): fix incorrect import statement in README (#604) Source-Link: https://github.com/googleapis/synthtool/commit/87677404f85cee860588ebe2c352d0609f683d5d Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:023a21377a2a00008057f99f0118edadc30a19d1636a3fee47189ebec2f3921c Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- README.rst | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 3f7634f2..c4e82889 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f016446d6e520e5fb552c45b110cba3f217bffdd3d06bdddd076e9e6d13266cf -# created: 2025-02-21T19:32:52.01306189Z + digest: sha256:023a21377a2a00008057f99f0118edadc30a19d1636a3fee47189ebec2f3921c +# created: 2025-03-31T16:51:40.130756953Z diff --git a/README.rst b/README.rst index dc21a4e8..f458af4a 100644 --- a/README.rst +++ b/README.rst @@ -166,7 +166,7 @@ Code-Based Examples import logging - from google.cloud.translate_v3 import translate + from google.cloud import library_v1 base_logger = logging.getLogger("google") base_logger.addHandler(logging.StreamHandler()) @@ -178,7 +178,7 @@ Code-Based Examples import logging - from google.cloud.translate_v3 import translate + from google.cloud import library_v1 base_logger = logging.getLogger("google.cloud.library_v1") base_logger.addHandler(logging.StreamHandler()) From d3919110b49e7389ad3f4e8bbb55b104417457fa Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 10 Apr 2025 15:00:12 -0400 Subject: [PATCH 09/10] chore(python): remove CONTRIBUTING.rst from templates (#605) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(python): remove CONTRIBUTING.rst from templates Source-Link: https://github.com/googleapis/synthtool/commit/c96fb118e03c2b50d50fe17c1d0845479a0cfa9a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:710b70faff81151657d89db6e028c23a1051787598c8276bdd8eef25c92da8ab * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See 
https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Remove replacement in owlbot.py --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/.OwlBot.lock.yaml | 4 ++-- owlbot.py | 42 --------------------------------------- 2 files changed, 2 insertions(+), 44 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index c4e82889..8bc6405e 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:023a21377a2a00008057f99f0118edadc30a19d1636a3fee47189ebec2f3921c -# created: 2025-03-31T16:51:40.130756953Z + digest: sha256:710b70faff81151657d89db6e028c23a1051787598c8276bdd8eef25c92da8ab +# created: 2025-04-10T17:48:54.829145676Z diff --git a/owlbot.py b/owlbot.py index 9fcf0e15..cbb0f8ae 100644 --- a/owlbot.py +++ b/owlbot.py @@ -211,48 +211,6 @@ def docfx(session): r"Dict[str, str]", ) -# Add documentation about creating indexes and populating data for system -# tests. -assert 1 == s.replace( - "CONTRIBUTING.rst", - r""" -\*\*\*\*\*\*\*\*\*\*\*\*\* -Test Coverage -\*\*\*\*\*\*\*\*\*\*\*\*\* -""", - """ -- You'll need to create composite - `indexes `__ - with the ``gcloud`` command line - `tool `__:: - - # Install the app (App Engine Command Line Interface) component. - $ gcloud components install app-engine-python - - # Authenticate the gcloud tool with your account. - $ GOOGLE_APPLICATION_CREDENTIALS="path/to/app_credentials.json" - $ gcloud auth activate-service-account \ - > --key-file=${GOOGLE_APPLICATION_CREDENTIALS} - - # Create the indexes - $ gcloud datastore indexes create tests/system/index.yaml - -- You'll also need stored data in your dataset. To populate this data, run:: - - $ python tests/system/utils/populate_datastore.py - -- If you make a mistake during development (i.e. 
a failing test that - prevents clean-up) you can clear all system test data from your - datastore instance via:: - - $ python tests/system/utils/clear_datastore.py - -************* -Test Coverage -************* -""", -) - # add type checker nox session s.replace( "noxfile.py", From 1dafc68d897ec771fc3921124e1619c4fb6b34bf Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 14 Apr 2025 17:59:08 -0700 Subject: [PATCH 10/10] chore(main): release 2.21.0 (#595) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- CHANGELOG.md | 15 +++++++++++++++ google/cloud/datastore/gapic_version.py | 2 +- google/cloud/datastore/version.py | 2 +- google/cloud/datastore_admin/gapic_version.py | 2 +- google/cloud/datastore_admin_v1/gapic_version.py | 2 +- google/cloud/datastore_v1/gapic_version.py | 2 +- 7 files changed, 21 insertions(+), 6 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index eeb4bcda..5be20145 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.20.2" + ".": "2.21.0" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 549b44f4..0ed89fd5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,21 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.21.0](https://github.com/googleapis/python-datastore/compare/v2.20.2...v2.21.0) (2025-04-10) + + +### Features + +* Add REST Interceptors which support reading metadata ([7be9c4c](https://github.com/googleapis/python-datastore/commit/7be9c4c594af2c2414e394b8bfe62574b58ef337)) +* Add support for opt-in debug logging ([7be9c4c](https://github.com/googleapis/python-datastore/commit/7be9c4c594af2c2414e394b8bfe62574b58ef337)) + + +### Bug Fixes + +* Allow protobuf 6.x ([#598](https://github.com/googleapis/python-datastore/issues/598)) ([7c1171b](https://github.com/googleapis/python-datastore/commit/7c1171bf657f7cf4d1404e19611f6c874a8998ca)) +* Backwards-compatibility for previous meaning format ([#603](https://github.com/googleapis/python-datastore/issues/603)) ([ed92e8e](https://github.com/googleapis/python-datastore/commit/ed92e8e54a9e0f44302efee89a30a322d0a73636)) +* Fix typing issue with gRPC metadata when key ends in -bin ([7be9c4c](https://github.com/googleapis/python-datastore/commit/7be9c4c594af2c2414e394b8bfe62574b58ef337)) + ## [2.20.2](https://github.com/googleapis/python-datastore/compare/v2.20.1...v2.20.2) (2024-12-12) diff --git a/google/cloud/datastore/gapic_version.py b/google/cloud/datastore/gapic_version.py index 6975b43d..81570778 100644 --- a/google/cloud/datastore/gapic_version.py +++ b/google/cloud/datastore/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.20.2" # {x-release-please-version} +__version__ = "2.21.0" # {x-release-please-version} diff --git a/google/cloud/datastore/version.py b/google/cloud/datastore/version.py index e37230a6..563b0e16 100644 --- a/google/cloud/datastore/version.py +++ b/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.20.2" +__version__ = "2.21.0" diff --git a/google/cloud/datastore_admin/gapic_version.py b/google/cloud/datastore_admin/gapic_version.py index 4c1787c5..e546bae0 100644 --- a/google/cloud/datastore_admin/gapic_version.py +++ b/google/cloud/datastore_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.20.2" # {x-release-please-version} +__version__ = "2.21.0" # {x-release-please-version} diff --git a/google/cloud/datastore_admin_v1/gapic_version.py b/google/cloud/datastore_admin_v1/gapic_version.py index efd18799..6e29ec5f 100644 --- a/google/cloud/datastore_admin_v1/gapic_version.py +++ b/google/cloud/datastore_admin_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.20.2" # {x-release-please-version} +__version__ = "2.21.0" # {x-release-please-version} diff --git a/google/cloud/datastore_v1/gapic_version.py b/google/cloud/datastore_v1/gapic_version.py index efd18799..6e29ec5f 100644 --- a/google/cloud/datastore_v1/gapic_version.py +++ b/google/cloud/datastore_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.20.2" # {x-release-please-version} +__version__ = "2.21.0" # {x-release-please-version}